Dec 08 21:18:37 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 08 21:18:37 crc restorecon[4692]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc 
restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 21:18:37 crc 
restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 
21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 21:18:37 crc 
restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 
21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:37 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 08 21:18:38 crc restorecon[4692]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 08 21:18:38 crc kubenswrapper[4912]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.261152 4912 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266212 4912 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266244 4912 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266252 4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266262 4912 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266273 4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266282 4912 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266291 4912 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266298 4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266304 4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266310 4912 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266316 4912 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266322 4912 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266327 4912 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266333 4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266338 4912 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266343 4912 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266349 4912 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266354 4912 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266360 4912 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266365 4912 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266370 4912 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266375 4912 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266382 4912 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266388 4912 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266395 4912 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266400 4912 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266407 4912 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266413 4912 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266420 4912 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266429 4912 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266437 4912 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266445 4912 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266452 4912 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266460 4912 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266466 4912 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266472 4912 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266478 4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266483 4912 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266489 4912 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266495 4912 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266500 4912 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266506 4912 feature_gate.go:330] unrecognized feature gate: Example
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266511 4912 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266516 4912 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266522 4912 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266527 4912 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266532 4912 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266538 4912 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266543 4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266548 4912 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266553 4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266559 4912 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266565 4912 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266573 4912 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266581 4912 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266588 4912 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266596 4912 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266603 4912 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266610 4912 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266616 4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266624 4912 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266631 4912 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266637 4912 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266644 4912 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266650 4912 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266656 4912 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266663 4912 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266671 4912 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266677 4912 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266691 4912 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.266701 4912 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267028    4912 flags.go:64] FLAG: --address="0.0.0.0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267062    4912 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267075    4912 flags.go:64] FLAG: --anonymous-auth="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267084    4912 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267093    4912 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267100    4912 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267108    4912 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267117    4912 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267123    4912 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267130    4912 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267137    4912 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267143    4912 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267149    4912 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267156    4912 flags.go:64] FLAG: --cgroup-root=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267162    4912 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267168    4912 flags.go:64] FLAG: --client-ca-file=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267174    4912 flags.go:64] FLAG: --cloud-config=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267179    4912 flags.go:64] FLAG: --cloud-provider=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267185    4912 flags.go:64] FLAG: --cluster-dns="[]"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267193    4912 flags.go:64] FLAG: --cluster-domain=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267199    4912 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267205    4912 flags.go:64] FLAG: --config-dir=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267211    4912 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267219    4912 flags.go:64] FLAG: --container-log-max-files="5"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267228    4912 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267234    4912 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267241    4912 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267247    4912 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267283    4912 flags.go:64] FLAG: --contention-profiling="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267290    4912 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267297    4912 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267305    4912 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267312    4912 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267320    4912 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267329    4912 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267336    4912 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267342    4912 flags.go:64] FLAG: --enable-load-reader="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267348    4912 flags.go:64] FLAG: --enable-server="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267355    4912 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267363    4912 flags.go:64] FLAG: --event-burst="100"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267369    4912 flags.go:64] FLAG: --event-qps="50"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267375    4912 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267382    4912 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267387    4912 flags.go:64] FLAG: --eviction-hard=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267395    4912 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267402    4912 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267408    4912 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267414    4912 flags.go:64] FLAG: --eviction-soft=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267420    4912 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267426    4912 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267431    4912 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267437    4912 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267443    4912 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267449    4912 flags.go:64] FLAG: --fail-swap-on="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267455    4912 flags.go:64] FLAG: --feature-gates=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267463    4912 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267470    4912 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267476    4912 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267482    4912 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267488    4912 flags.go:64] FLAG: --healthz-port="10248"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267496    4912 flags.go:64] FLAG: --help="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267502    4912 flags.go:64] FLAG: --hostname-override=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267509    4912 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267515    4912 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267522    4912 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267528    4912 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267533    4912 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267539    4912 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267545    4912 flags.go:64] FLAG: --image-service-endpoint=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267551    4912 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267558    4912 flags.go:64] FLAG: --kube-api-burst="100"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267564    4912 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267572    4912 flags.go:64] FLAG: --kube-api-qps="50"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267578    4912 flags.go:64] FLAG: --kube-reserved=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267585    4912 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267590    4912 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267596    4912 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267602    4912 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267608    4912 flags.go:64] FLAG: --lock-file=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267614    4912 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267621    4912 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267627    4912 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267641    4912 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267647    4912 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267654    4912 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267660    4912 flags.go:64] FLAG: --logging-format="text"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267666    4912 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267673    4912 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267678    4912 flags.go:64] FLAG: --manifest-url=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267684    4912 flags.go:64] FLAG: --manifest-url-header=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267692    4912 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267700    4912 flags.go:64] FLAG: --max-open-files="1000000"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267708    4912 flags.go:64] FLAG: --max-pods="110"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267716    4912 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267723    4912 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267731    4912 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267738    4912 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267745    4912 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267752    4912 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267758    4912 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267774    4912 flags.go:64] FLAG: --node-status-max-images="50"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267780    4912 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267787    4912 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267793    4912 flags.go:64] FLAG: --pod-cidr=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267798    4912 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267807    4912 flags.go:64] FLAG: --pod-manifest-path=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267814    4912 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267821    4912 flags.go:64] FLAG: --pods-per-core="0"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267827    4912 flags.go:64] FLAG: --port="10250"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267833    4912 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267839    4912 flags.go:64] FLAG: --provider-id=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267845    4912 flags.go:64] FLAG: --qos-reserved=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267850    4912 flags.go:64] FLAG: --read-only-port="10255"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267857    4912 flags.go:64] FLAG: --register-node="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267863    4912 flags.go:64] FLAG: --register-schedulable="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267869    4912 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267882    4912 flags.go:64] FLAG: --registry-burst="10"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267889    4912 flags.go:64] FLAG: --registry-qps="5"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267896    4912 flags.go:64] FLAG: --reserved-cpus=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267902    4912 flags.go:64] FLAG: --reserved-memory=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267910    4912 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267915    4912 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267922    4912 flags.go:64] FLAG: --rotate-certificates="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267928    4912 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267934    4912 flags.go:64] FLAG: --runonce="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267940    4912 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267946    4912 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267952    4912 flags.go:64] FLAG: --seccomp-default="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267959    4912 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267965    4912 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267972    4912 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267979    4912 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267987    4912 flags.go:64] FLAG: --storage-driver-password="root"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.267995    4912 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268002    4912 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268010    4912 flags.go:64] FLAG: --storage-driver-user="root"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268017    4912 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268025    4912 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268052    4912 flags.go:64] FLAG: --system-cgroups=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268061    4912 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268072    4912 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268078    4912 flags.go:64] FLAG: --tls-cert-file=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268085    4912 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268092    4912 flags.go:64] FLAG: --tls-min-version=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268098    4912 flags.go:64] FLAG: --tls-private-key-file=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268105    4912 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268111    4912 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268117    4912 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268123    4912 flags.go:64] FLAG: --v="2"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268133    4912 flags.go:64] FLAG: --version="false"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268141    4912 flags.go:64] FLAG: --vmodule=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268149    4912 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268155    4912 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268333    4912 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268340    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268347    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268353    4912 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268359    4912 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268365    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268370    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268376    4912 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268382    4912 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268387    4912 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268392    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268397    4912 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268403    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268408    4912 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268413    4912 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268418    4912 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268423    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268428    4912 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268434    4912 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268439    4912 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268445    4912 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268451    4912 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268458    4912 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268465    4912 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268481    4912 feature_gate.go:330] unrecognized feature gate: Example
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268488    4912 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268494    4912 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268499    4912 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268505    4912 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268511    4912 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268517    4912 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268522    4912 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268530    4912 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268535    4912 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268540    4912 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268545    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268551    4912 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268556    4912 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268562    4912 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268567    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268572    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268577    4912 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268581    4912 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268587    4912 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268592    4912 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268597    4912 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268603    4912 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268608    4912 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268613    4912 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268618    4912 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268623    4912 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268629    4912 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268633    4912 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268638    4912 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268644    4912 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268649    4912 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268659    4912 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268666    4912 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268671    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268678    4912 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268684    4912 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268691    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268697    4912 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268702    4912 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268711    4912 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268716    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268722    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268727    4912 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268732    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268737    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.268742    4912 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.268760    4912 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.280643    4912 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.280703    4912 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280840    4912 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280855    4912 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280863    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280871    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280878    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280885    4912 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280892    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280901    4912 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280914    4912 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280923    4912 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280932    4912 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280940    4912 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280946    4912 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280955    4912 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280963    4912 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280971    4912 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280979    4912 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280985    4912 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280992    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.280999    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281006    4912 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281013    4912 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281020    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281027    4912 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281057    4912 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281065    4912 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281073    4912 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281079    4912 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281087    4912 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281095    4912 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281101    4912 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281110    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281119    4912 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281126    4912 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281132    4912 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281139    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281144    4912 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281151    4912 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281157    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281164    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281171    4912 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281177    4912 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281183    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281190    4912 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281196    4912 feature_gate.go:330] unrecognized feature gate: Example
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281203    4912 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281209    4912 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281216    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281222    4912 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281228    4912 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281235    4912 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281241    4912 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281247    4912 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281253    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281259    4912 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281266    4912 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281272    4912 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281278    4912 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281284    4912 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281290    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281297    4912 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281305    4912 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281312    4912 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281321    4912 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281328    4912 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281337    4912 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281346    4912 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281354    4912 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281361    4912 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281368    4912 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281375    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.281386    4912 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281614    4912 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281629    4912 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281636    4912 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281643    4912 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281650    4912 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281656    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281665    4912 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281674    4912 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281681    4912 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281688    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281694    4912 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281701    4912 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281707    4912 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281713    4912 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281721    4912 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281727    4912 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281733    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281740    4912 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281746    4912 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281753    4912 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281760    4912 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281767    4912 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281774    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281784    4912 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281793    4912 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281801    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281807    4912 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281814    4912 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281820    4912 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281827    4912 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281834    4912 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281841    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281847    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281854    4912 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281861    4912 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281870    4912 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281879    4912 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281888    4912 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281896    4912 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281904    4912 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281913    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281921    4912 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281929    4912 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281936    4912 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281944    4912 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281952    4912 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281959    4912 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281966    4912 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281973    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281981    4912 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281988    4912 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.281994    4912 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282001    4912 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282008    4912 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282014    4912 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282022    4912 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282029    4912 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282063    4912 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282070    4912 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282076    4912 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282085    4912 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282094    4912 feature_gate.go:330] unrecognized feature gate: Example
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282101    4912 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282108    4912 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282115    4912 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282121    4912 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282128    4912 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282134    4912 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282141    4912 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282148    4912 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.282156    4912 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.282169    4912 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.282811    4912 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.287000    4912 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.287222    4912 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.287933 4912 server.go:997] "Starting client certificate rotation"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.287961 4912 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.288309 4912 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-14 04:48:46.628693963 +0000 UTC
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.288400 4912 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 871h30m8.340296702s for next certificate rotation
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.297909 4912 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.300074 4912 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.307681 4912 log.go:25] "Validated CRI v1 runtime API"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.324396 4912 log.go:25] "Validated CRI v1 image API"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.326463 4912 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.330341 4912 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-08-21-14-16-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.330369 4912 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.351211 4912 manager.go:217] Machine: {Timestamp:2025-12-08 21:18:38.348640651 +0000 UTC m=+0.211642804 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c284651b-3329-4e29-9d38-e509676bbc7f BootID:f3bfd3da-2c7d-428e-8d8d-fcbf1998f292 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ee:42:44 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:ee:42:44 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:34:c2:27 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:32:66:39 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:a3:37:ab Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:c7:1c:6d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0e:8e:5c:fa:5f:a0 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:36:c2:30:18:9e:56 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.351713 4912 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.351990 4912 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.352829 4912 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353094 4912 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353142 4912 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353426 4912 topology_manager.go:138] "Creating topology manager with none policy"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353440 4912 container_manager_linux.go:303] "Creating device plugin manager"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353715 4912 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.353769 4912 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.354235 4912 state_mem.go:36] "Initialized new in-memory state store"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.354378 4912 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.355108 4912 kubelet.go:418] "Attempting to sync node with API server"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.355133 4912 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.355160 4912 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.355181 4912 kubelet.go:324] "Adding apiserver pod source"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.355196 4912 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.358482 4912 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.358703 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.358778 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.358865 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.358945 4912 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.358991 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.359961 4912 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360604 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360626 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360632 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360640 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360651 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360658 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360665 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360676 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360686 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360695 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360709 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360718 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.360916 4912 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.361489 4912 server.go:1280] "Started kubelet"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.361781 4912 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.361844 4912 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.361831 4912 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.362620 4912 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 08 21:18:38 crc systemd[1]: Started Kubernetes Kubelet.
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.363629 4912 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187f5a2ab140b87f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:18:38.361458815 +0000 UTC m=+0.224460898,LastTimestamp:2025-12-08 21:18:38.361458815 +0000 UTC m=+0.224460898,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.364176 4912 server.go:460] "Adding debug handlers to kubelet server"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.365515 4912 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.365559 4912 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.365875 4912 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 02:28:08.949764849 +0000 UTC
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.365902 4912 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.366184 4912 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.366288 4912 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.368620 4912 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.370393 4912 factory.go:55] Registering systemd factory
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.370468 4912 factory.go:221] Registration of the systemd container factory successfully
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.369755 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="200ms"
Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.371077 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.371200 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.371982 4912 factory.go:153] Registering CRI-O factory
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.372014 4912 factory.go:221] Registration of the crio container factory successfully
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.372126 4912 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.372153 4912 factory.go:103] Registering Raw factory
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.372179 4912 manager.go:1196] Started watching for new ooms in manager
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.373421 4912 manager.go:319] Starting recovery of all containers
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383104 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383274 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383290 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383304 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383348 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383365 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383378 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383392 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383408 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383420 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383434 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383452 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383466 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383541 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383557 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383570 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383585 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383598 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383611 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383623 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383636 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383650 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383663 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383680 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383697 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383712 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383751 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383769 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383785 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383829 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383844 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383858 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383873 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383888 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383902 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383916 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383960 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383975 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.383990 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384005 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384021 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384051 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384067 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384083 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384099 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384137 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384154 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384168 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384183 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384196 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384210 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384227 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384249 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384265 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384280 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384294 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384309 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384324 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384337 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384352 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384363 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384373 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384384 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384395 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384405 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384415 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384424 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384434 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384445 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384488 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384504 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384518 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384529 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384540 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384549 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384562 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384572 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384584 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384593 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384605 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384616 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384625 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384637 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384647 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384656 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384669 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384682 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384695 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384706 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384715 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384725 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384736 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384746 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384755 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384765 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384774 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384784 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384795 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384807 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384818 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384828 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384839 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384849 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384858 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384874 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384885 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384896 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384909 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384919 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384929 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384940 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384951 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384961 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384972 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384982 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.384995 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385004 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385015 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385024 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385050 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385060 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385068 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385078 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385089 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385099 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385108 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385118 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385129 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385139 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385149 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385160 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385171 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385180 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385191 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385201 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385212 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385223 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385233 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385243 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385254 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385264 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385275 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385284 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385294 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385305 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385315 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385325 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385335 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385347 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385358 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385367 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385377 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385387 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385396 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385407 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385418 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385428 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385438 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385448 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385459 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385469 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385479 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385493 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385502 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385512 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385522 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385532 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385542 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385552 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385562 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385573 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385584 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385594 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.385605 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386221 4912 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386253 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386263 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386274 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386286 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386296 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386305 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386314 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386323 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386333 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386343 4912 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386354 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386366 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386378 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386388 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386398 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386407 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386417 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386427 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386437 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386448 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386461 4912 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386473 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386482 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386492 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386501 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386511 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386520 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386529 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386545 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386558 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386570 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386581 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386593 4912 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386602 4912 reconstruct.go:97] "Volume reconstruction finished" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.386609 4912 reconciler.go:26] "Reconciler: start to sync state" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.405918 4912 manager.go:324] Recovery completed Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.419200 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.423396 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.423300 4912 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.423450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.423462 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426476 4912 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426512 4912 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426537 4912 kubelet.go:2335] "Starting kubelet main sync loop" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.426581 4912 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426741 4912 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426807 4912 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.426872 4912 state_mem.go:36] "Initialized new in-memory state store" Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.427426 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.428383 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.435624 4912 policy_none.go:49] "None policy: Start" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.436881 4912 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.437004 4912 state_mem.go:35] "Initializing new in-memory state store" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.466297 4912 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.496488 4912 manager.go:334] "Starting Device Plugin manager" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.496551 4912 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.496568 4912 server.go:79] "Starting device plugin registration server" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.497157 4912 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.497177 4912 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.497423 4912 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.497526 4912 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.497538 4912 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.503888 4912 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 21:18:38 crc kubenswrapper[4912]: 
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.526865 4912 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.527002 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.528213 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.528266 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.528281 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.528844 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.528896 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.529471 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.529997 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.530052 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.530066 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.530975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.531011 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.531024 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.531223 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.531419 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
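The "SyncLoop ADD" with source="file" is the kubelet picking up static pod manifests from disk, which is how the five control-plane pods listed above exist before any API server is reachable. A sketch that extracts those pod lists from the log (file name assumed, one entry per line assumed):

```python
import ast
import re

# source="file" marks static pods read from manifests on disk rather
# than from the API server; the pods= field is a list literal.
ADD = re.compile(r'"SyncLoop ADD" source="file" pods=(\[.*?\])')

with open("kubelet.log", encoding="utf-8") as log:  # assumed path
    for line in log:
        match = ADD.search(line)
        if match:
            for pod in ast.literal_eval(match.group(1)):
                print(pod)
```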
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.531459 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533021 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533069 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533082 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533144 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533157 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533200 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533368 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533403 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.533989 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534029 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534179 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534219 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534241 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534310 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.534342 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535464 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535475 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535483 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535490 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535496 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535629 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.535652 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.536436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.536480 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.536492 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.571456 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="400ms" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588197 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588239 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588264 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod 
\"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588287 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588306 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588329 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588411 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588475 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588518 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588552 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588581 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588613 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588646 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588689 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.588733 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.597465 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.598824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.599165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.599188 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.599213 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.599664 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690580 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690650 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690671 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690687 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690704 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690719 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690734 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690749 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690795 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690812 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690828 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690847 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690861 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690855 4912 
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690855 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690941 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690954 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690876 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691133 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690925 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691159 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691184 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691206 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.690910 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
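The volume reconciler's phases are all visible here: reconciler_common.go:245 starts VerifyControllerAttachedVolume, reconciler_common.go:218 starts MountVolume, and operation_generator.go:637 logs MountVolume.SetUp succeeded. Pairing the started/succeeded messages per (volume, pod) is a quick way to spot mounts that never completed; a sketch under the same file-name and one-entry-per-line assumptions:

```python
import re

# Match the escaped volume name (\"cert-dir\") and the trailing pod= key.
STARTED = re.compile(r'"operationExecutor\.MountVolume started for volume \\"(?P<vol>[^\\]+)\\".*?pod="(?P<pod>[^"]+)"')
DONE = re.compile(r'"MountVolume\.SetUp succeeded for volume \\"(?P<vol>[^\\]+)\\".*?pod="(?P<pod>[^"]+)"')

started, done = set(), set()
with open("kubelet.log", encoding="utf-8") as log:  # assumed path
    for line in log:
        for pattern, bucket in ((STARTED, started), (DONE, done)):
            match = pattern.search(line)
            if match:
                bucket.add((match.group("vol"), match.group("pod")))

for vol, pod in sorted(started - done):
    print(f"mount not confirmed: {vol} in {pod}")
```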
4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691236 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691258 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691256 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691279 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691324 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.691209 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.800450 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.805980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.806050 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.806066 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.806161 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.807464 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.102.83.224:6443: connect: connection refused" node="crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.854657 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.863336 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.883794 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.903700 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: I1208 21:18:38.911087 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.913443 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-371942f26245a62d45bfa69e44373318351ac0eaeac3080c3d61e00ee653d1b7 WatchSource:0}: Error finding container 371942f26245a62d45bfa69e44373318351ac0eaeac3080c3d61e00ee653d1b7: Status 404 returned error can't find the container with id 371942f26245a62d45bfa69e44373318351ac0eaeac3080c3d61e00ee653d1b7 Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.919477 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-ded3a4ba5a35b9dc25012491f5e4dbb493f52ffc6eea112988ce1ea15586855e WatchSource:0}: Error finding container ded3a4ba5a35b9dc25012491f5e4dbb493f52ffc6eea112988ce1ea15586855e: Status 404 returned error can't find the container with id ded3a4ba5a35b9dc25012491f5e4dbb493f52ffc6eea112988ce1ea15586855e Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.919982 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-992c86a58f2d4f21ee9605bba0f00d6f77aaba574f4f3293f48d3a5cc9885cc0 WatchSource:0}: Error finding container 992c86a58f2d4f21ee9605bba0f00d6f77aaba574f4f3293f48d3a5cc9885cc0: Status 404 returned error can't find the container with id 992c86a58f2d4f21ee9605bba0f00d6f77aaba574f4f3293f48d3a5cc9885cc0 Dec 08 21:18:38 crc kubenswrapper[4912]: W1208 21:18:38.939262 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-23c933c85919347f3a6324b37b3ff99197ef67f0c2fc91b9c9acbec24a77cdfa WatchSource:0}: Error finding container 23c933c85919347f3a6324b37b3ff99197ef67f0c2fc91b9c9acbec24a77cdfa: Status 404 returned error can't find the container with id 23c933c85919347f3a6324b37b3ff99197ef67f0c2fc91b9c9acbec24a77cdfa Dec 08 21:18:38 crc kubenswrapper[4912]: E1208 21:18:38.973362 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="800ms" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 
21:18:39.208658 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.210125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.210179 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.210195 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.210230 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.210752 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Dec 08 21:18:39 crc kubenswrapper[4912]: W1208 21:18:39.245507 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.245611 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.363798 4912 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.366814 4912 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 06:15:23.50940589 +0000 UTC Dec 08 21:18:39 crc kubenswrapper[4912]: W1208 21:18:39.389393 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.389495 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.437058 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.437164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"992c86a58f2d4f21ee9605bba0f00d6f77aaba574f4f3293f48d3a5cc9885cc0"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.439324 4912 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b" exitCode=0 Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.439387 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.439448 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ded3a4ba5a35b9dc25012491f5e4dbb493f52ffc6eea112988ce1ea15586855e"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.439565 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.441106 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087" exitCode=0 Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.441174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.441193 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"96249b4317c947ddf3f7109b3975bea16fb8ddf177f585645f3e47c1f2502231"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.441494 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442133 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442157 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442166 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442449 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.442458 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.443447 4912 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" 
containerID="9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817" exitCode=0 Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.443498 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.443515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"371942f26245a62d45bfa69e44373318351ac0eaeac3080c3d61e00ee653d1b7"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.443610 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.444696 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.447261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.447371 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.447395 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.447934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.448005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.448060 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.448853 4912 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947" exitCode=0 Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.448941 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.448983 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"23c933c85919347f3a6324b37b3ff99197ef67f0c2fc91b9c9acbec24a77cdfa"} Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.449718 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.451105 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.451135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:39 crc kubenswrapper[4912]: I1208 21:18:39.451149 4912 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:39 crc kubenswrapper[4912]: W1208 21:18:39.615901 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.616125 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:39 crc kubenswrapper[4912]: W1208 21:18:39.752921 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.753092 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Dec 08 21:18:39 crc kubenswrapper[4912]: E1208 21:18:39.774674 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="1.6s" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.011243 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.012502 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.012535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.012547 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.012579 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:40 crc kubenswrapper[4912]: E1208 21:18:40.013179 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.367564 4912 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 12:08:11.765819108 +0000 UTC Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.367636 4912 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 206h49m31.398186396s for next certificate rotation Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.459402 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.459469 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.459486 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.459618 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.465257 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.465342 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.465364 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.474927 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.474985 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.474996 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.475006 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.477207 4912 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea" exitCode=0 Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.477288 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.477526 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:40 crc kubenswrapper[4912]: 
I1208 21:18:40.478493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.478526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.478539 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.479457 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"19ba881c167a017a8b1aa79696550cac4b2d6e2d8bdbc6e029da49bbc9393b1e"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.479529 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.480719 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.480745 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.480756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.482463 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.482490 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.482502 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731"} Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.482555 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.483194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.483217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:40 crc kubenswrapper[4912]: I1208 21:18:40.483229 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.491489 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206"} Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.491608 4912 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.493627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.493701 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.493720 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.494924 4912 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674" exitCode=0 Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.494974 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674"} Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.495098 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.495126 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.495204 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.497808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.497849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.497861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498680 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.498979 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.614137 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.615950 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:41 
crc kubenswrapper[4912]: I1208 21:18:41.616022 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.616077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:41 crc kubenswrapper[4912]: I1208 21:18:41.616130 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.196398 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.210014 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.502965 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e"} Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.503838 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440"} Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.503882 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b"} Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.503079 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.503150 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.505641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.505686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.505701 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.506179 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.506230 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:42 crc kubenswrapper[4912]: I1208 21:18:42.506244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.511123 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb"} Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.511197 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.511214 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.511227 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160"} Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.512534 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.512589 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.512608 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.513227 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.513263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.513275 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:43 crc kubenswrapper[4912]: I1208 21:18:43.825418 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.514546 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.514555 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516133 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516338 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516386 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:44 crc kubenswrapper[4912]: I1208 21:18:44.516405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:45 crc kubenswrapper[4912]: I1208 21:18:45.154755 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:45 crc kubenswrapper[4912]: I1208 21:18:45.517852 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:45 crc kubenswrapper[4912]: I1208 21:18:45.519106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:18:45 crc kubenswrapper[4912]: I1208 21:18:45.519181 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:45 crc kubenswrapper[4912]: I1208 21:18:45.519196 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:46 crc kubenswrapper[4912]: I1208 21:18:46.310395 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:46 crc kubenswrapper[4912]: I1208 21:18:46.310632 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:46 crc kubenswrapper[4912]: I1208 21:18:46.312552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:46 crc kubenswrapper[4912]: I1208 21:18:46.312591 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:46 crc kubenswrapper[4912]: I1208 21:18:46.312606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:47 crc kubenswrapper[4912]: I1208 21:18:47.323023 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 08 21:18:47 crc kubenswrapper[4912]: I1208 21:18:47.323294 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:47 crc kubenswrapper[4912]: I1208 21:18:47.324909 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:47 crc kubenswrapper[4912]: I1208 21:18:47.324966 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:47 crc kubenswrapper[4912]: I1208 21:18:47.324987 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:48 crc kubenswrapper[4912]: I1208 21:18:48.222569 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:48 crc kubenswrapper[4912]: I1208 21:18:48.222890 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:48 crc kubenswrapper[4912]: I1208 21:18:48.225161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:48 crc kubenswrapper[4912]: I1208 21:18:48.225219 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:48 crc kubenswrapper[4912]: I1208 21:18:48.225237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:48 crc kubenswrapper[4912]: E1208 21:18:48.503991 4912 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 21:18:49 crc kubenswrapper[4912]: I1208 21:18:49.310988 4912 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" 
start-of-body= Dec 08 21:18:49 crc kubenswrapper[4912]: I1208 21:18:49.311156 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.089306 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.089440 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.190088 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.190345 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.192184 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.192305 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.192376 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.205388 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.335570 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.335943 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.337955 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.338012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.338028 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.364717 4912 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.531666 4912 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.533171 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.533244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.533268 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:50 crc kubenswrapper[4912]: I1208 21:18:50.536832 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:51 crc kubenswrapper[4912]: E1208 21:18:51.376861 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 08 21:18:51 crc kubenswrapper[4912]: E1208 21:18:51.497112 4912 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187f5a2ab140b87f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:18:38.361458815 +0000 UTC m=+0.224460898,LastTimestamp:2025-12-08 21:18:38.361458815 +0000 UTC m=+0.224460898,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.534428 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.535691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.535722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.535731 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:51 crc kubenswrapper[4912]: W1208 21:18:51.566429 4912 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.566564 4912 trace.go:236] Trace[277413589]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:41.564) (total time: 10001ms): Dec 08 21:18:51 crc kubenswrapper[4912]: Trace[277413589]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:18:51.566) Dec 08 21:18:51 crc kubenswrapper[4912]: Trace[277413589]: [10.001684315s] [10.001684315s] END Dec 08 21:18:51 crc 
kubenswrapper[4912]: E1208 21:18:51.566595 4912 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 21:18:51 crc kubenswrapper[4912]: E1208 21:18:51.618203 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.744688 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.744910 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.748753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.748797 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.748808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.785533 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.785619 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.791114 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 08 21:18:51 crc kubenswrapper[4912]: I1208 21:18:51.791176 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 21:18:53 crc kubenswrapper[4912]: I1208 21:18:53.834931 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:53 
crc kubenswrapper[4912]: I1208 21:18:53.835265 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:53 crc kubenswrapper[4912]: I1208 21:18:53.837343 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:53 crc kubenswrapper[4912]: I1208 21:18:53.837434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:53 crc kubenswrapper[4912]: I1208 21:18:53.837460 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:53 crc kubenswrapper[4912]: I1208 21:18:53.842767 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.546809 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.546883 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.548310 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.548399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.548426 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.818406 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.820406 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.820451 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.820468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:18:54 crc kubenswrapper[4912]: I1208 21:18:54.820500 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 08 21:18:54 crc kubenswrapper[4912]: E1208 21:18:54.824904 4912 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.778280 4912 trace.go:236] Trace[907436929]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:42.197) (total time: 14580ms):
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[907436929]: ---"Objects listed" error: 14580ms (21:18:56.778)
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[907436929]: [14.580835682s] [14.580835682s] END
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.778314 4912 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.781934 4912 trace.go:236] Trace[1181368192]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:42.837) (total time: 13944ms):
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[1181368192]: ---"Objects listed" error: 13944ms (21:18:56.781)
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[1181368192]: [13.944163879s] [13.944163879s] END
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.781972 4912 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.782528 4912 trace.go:236] Trace[1509888169]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 21:18:41.794) (total time: 14987ms):
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[1509888169]: ---"Objects listed" error: 14987ms (21:18:56.782)
Dec 08 21:18:56 crc kubenswrapper[4912]: Trace[1509888169]: [14.987506464s] [14.987506464s] END
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.782554 4912 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.782528 4912 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.833072 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49372->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.833206 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49372->192.168.126.11:17697: read: connection reset by peer"
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.833580 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.833627 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.870510 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 08 21:18:56 crc kubenswrapper[4912]: I1208 21:18:56.889871 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.362894 4912 apiserver.go:52] "Watching apiserver"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.387625 4912 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.387984 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-q6mfz","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.388637 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-q6mfz"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.388925 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.389150 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.389266 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.389389 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.389478 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.389574 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.389617 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.389696 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.390000 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.391661 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.393136 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.393362 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.393812 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.394092 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.394216 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.394286 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.395733 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.395822 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.395744 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.395995 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.396904 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.411075 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.424619 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.440577 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.452514 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.463136 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.470619 4912 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.475394 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487460 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487509 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487528 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487551 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487572 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487592 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: 
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487625 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487644 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487664 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487681 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487699 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487720 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487738 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487761 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487782 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487805 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487828 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487847 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487871 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487898 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487923 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487949 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.487974 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488002 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488060 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488091 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488117 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488144 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488169 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488195 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488222 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488252 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488279 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488321 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488351 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
\"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488374 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488427 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488456 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488482 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488511 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488538 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488563 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488589 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488616 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488641 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488688 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488716 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488745 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488784 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488813 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488839 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488870 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488904 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488931 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488956 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" 
(UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.488986 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489016 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489062 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489092 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489118 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489147 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489173 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489198 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489228 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489255 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489297 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489324 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489353 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489384 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489413 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489440 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489495 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489523 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489551 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489579 4912 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489605 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489631 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489657 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489686 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489718 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489744 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489769 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489797 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489826 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:18:57 crc kubenswrapper[4912]: 
I1208 21:18:57.489855 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489894 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489923 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489950 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.489977 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490006 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490033 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490077 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490107 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490136 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 21:18:57 crc 
kubenswrapper[4912]: I1208 21:18:57.490168 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490198 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490223 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490250 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490276 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490301 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490328 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490355 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490382 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490408 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490435 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490461 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490486 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490516 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490547 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490549 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490577 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490610 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490640 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490668 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490697 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490737 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490739 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490815 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490863 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490891 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490921 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490944 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490964 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.490986 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491011 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491054 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod 
\"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491078 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491100 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491124 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491148 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491170 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491197 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491220 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491337 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491362 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491394 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491433 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491458 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491492 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491523 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491550 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491581 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491605 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491629 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491659 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491682 4912 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491705 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491732 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491758 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491783 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491809 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491835 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491858 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491881 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491910 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc 
kubenswrapper[4912]: I1208 21:18:57.491940 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491967 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.491994 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492018 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492062 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492087 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492112 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492138 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492166 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492192 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 
21:18:57.492217 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492239 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492266 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492292 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492314 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492336 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492362 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492402 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492427 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492451 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492449 4912 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492474 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492571 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492599 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492625 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492647 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492667 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492692 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492693 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492715 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492741 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492766 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492785 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492805 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492824 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492844 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492865 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492873 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492915 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492944 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492964 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.492984 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493003 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493022 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493140 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493380 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493418 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493453 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493502 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493525 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493552 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493579 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493601 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493624 4912 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493649 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493669 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493689 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-hosts-file\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493707 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvxbk\" (UniqueName: \"kubernetes.io/projected/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-kube-api-access-bvxbk\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493728 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493746 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493764 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493853 4912 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493868 4912 reconciler_common.go:293] "Volume detached 
for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493881 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493895 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.493908 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.495876 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.495970 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496072 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496304 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496347 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496537 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496588 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496767 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496814 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.496852 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.497047 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.505167 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.505206 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.505427 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.505791 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506028 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506142 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506248 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506358 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506430 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506565 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506765 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.506990 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.507486 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.508520 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.513872 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.513932 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.514310 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.514313 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.514465 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.514800 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.514872 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.515230 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.516106 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.516406 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.519298 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.519369 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.519529 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:18:58.01949682 +0000 UTC m=+19.882499083 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.519711 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.519909 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.520884 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.521399 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.521653 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.527891 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.528398 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.528434 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.528761 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.529442 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.529774 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.530128 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.530293 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.530604 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.530651 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.531339 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.531616 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.531745 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.532501 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.532738 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.532840 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.533389 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-rp5rf"] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.533653 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-9vfng"] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.534201 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.534422 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.534604 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-74dp4"] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.534838 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.535117 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.535294 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.535642 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.535754 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.535941 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.537976 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.538302 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.538334 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.538614 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.538765 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.539392 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.539461 4912 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.539937 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.540242 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.540967 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.541070 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.543570 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.543629 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.543860 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.530792 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.547370 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.547478 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.547782 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.552994 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.553135 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.554368 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.554738 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555002 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555311 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555468 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555674 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555817 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555880 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.555925 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.573526 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.573773 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579016 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579159 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579419 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579457 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579515 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579694 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.579923 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.581537 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.581728 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.581824 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.556109 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.556265 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.557212 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.557446 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.557946 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.559307 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.582401 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:58.082365835 +0000 UTC m=+19.945368118 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.582454 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.582464 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.559531 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.560190 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.560220 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.562545 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.562785 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.562876 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.582694 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.582907 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.583016 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:58.082996361 +0000 UTC m=+19.945998444 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.559341 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583121 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583138 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583397 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583520 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583706 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583938 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.584091 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.583762 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.584332 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.584453 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.584520 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.584819 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585207 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585416 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585413 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585533 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585650 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.585821 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.586026 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.586388 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.586677 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.587166 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.587544 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.589231 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.592790 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.586024 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.586072 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.597337 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.597441 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.597905 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.598108 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.598489 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.598901 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599107 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599348 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599402 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599604 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599688 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599695 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599819 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599954 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.599992 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600023 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600388 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-netns\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600440 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-binary-copy\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600479 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600516 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600544 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-system-cni-dir\") pod 
\"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600572 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-os-release\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600612 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-cnibin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600692 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-etc-kubernetes\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600750 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-hosts-file\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600784 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-multus\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600810 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/831b06bd-095f-439f-a166-088c2d584933-proxy-tls\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600690 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600761 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.600942 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601132 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvxbk\" (UniqueName: \"kubernetes.io/projected/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-kube-api-access-bvxbk\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601189 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-multus-daemon-config\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-hosts-file\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601309 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx9wb\" (UniqueName: \"kubernetes.io/projected/935c51ff-7414-4687-be92-cda52803d7b0-kube-api-access-cx9wb\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601367 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-cnibin\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601363 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601427 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/831b06bd-095f-439f-a166-088c2d584933-mcd-auth-proxy-config\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601466 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fxph\" (UniqueName: \"kubernetes.io/projected/831b06bd-095f-439f-a166-088c2d584933-kube-api-access-7fxph\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601543 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601556 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601593 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-socket-dir-parent\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601623 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-hostroot\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601685 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-conf-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601775 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-os-release\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619919 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-cni-binary-copy\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619971 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-kubelet\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619963 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.601746 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619996 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619805 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.619836 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620162 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-k8s-cni-cncf-io\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620211 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/831b06bd-095f-439f-a166-088c2d584933-rootfs\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620277 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm968\" (UniqueName: \"kubernetes.io/projected/959add28-5508-49d7-8fe3-404acef398b0-kube-api-access-vm968\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620311 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-system-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620340 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-bin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620401 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-multus-certs\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.620710 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.621273 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.621709 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.621952 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.621934 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.622166 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.622227 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.622893 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623061 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623087 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623336 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.623434 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623456 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.623476 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.623502 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623545 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623557 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.623573 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:58.123546016 +0000 UTC m=+19.986548099 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623685 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623682 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623727 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623897 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.623438 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.624148 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.624179 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.624220 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.624310 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:58.124284105 +0000 UTC m=+19.987286188 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.624316 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.624848 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.624933 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.627013 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.627496 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.627881 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628101 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628151 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628191 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628421 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628759 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.628779 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630101 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630380 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630408 4912 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630426 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630444 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630457 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630469 4912 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630482 4912 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630498 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.630511 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631284 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631398 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631467 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631598 4912 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631698 4912 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.631796 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632147 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632279 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632307 4912 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632340 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632352 4912 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632362 4912 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632377 4912 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632388 4912 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632397 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632429 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632443 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632452 4912 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632462 4912 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632472 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632507 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632519 4912 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632529 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632542 4912 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632552 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632580 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632591 4912 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632604 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632615 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632626 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632655 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632669 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632679 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632689 4912 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632702 4912 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632731 4912 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632744 4912 reconciler_common.go:293] "Volume detached for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632757 4912 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632770 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632783 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632810 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632821 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632835 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632846 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632856 4912 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632866 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632897 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632908 4912 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632917 4912 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632930 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: 
\"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.632939 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633141 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633165 4912 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633178 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633190 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633208 4912 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633221 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633829 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633912 4912 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633943 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633960 4912 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.635511 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.635758 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.635861 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.636681 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.637218 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206" exitCode=255 Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.637305 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206"} Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.637511 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.640116 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.633983 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642166 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642164 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvxbk\" (UniqueName: \"kubernetes.io/projected/b73fac72-73a2-42cf-8d43-6aa187f7ba9c-kube-api-access-bvxbk\") pod \"node-resolver-q6mfz\" (UID: \"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\") " pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642182 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642286 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642313 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642328 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642351 4912 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642371 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.642386 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644233 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath 
\"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644324 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644379 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644432 4912 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644484 4912 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644548 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644611 4912 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644671 4912 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644731 4912 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644785 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644842 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644906 4912 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.644965 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645021 4912 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645100 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645153 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645213 4912 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645349 4912 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645413 4912 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645469 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645521 4912 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645603 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645662 4912 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645719 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645777 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645834 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645886 4912 reconciler_common.go:293] "Volume detached for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645944 4912 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646000 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646080 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646151 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646325 4912 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646390 4912 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646446 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646502 4912 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646554 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646605 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646655 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646718 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646778 4912 reconciler_common.go:293] 
"Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646834 4912 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646892 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.646949 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647005 4912 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647116 4912 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647176 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647233 4912 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647289 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647349 4912 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647410 4912 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647487 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647552 4912 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647608 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" 
(UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647671 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647722 4912 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647784 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647837 4912 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647887 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647937 4912 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.647989 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648081 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648138 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648194 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648250 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648302 4912 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648355 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648410 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648465 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648518 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648574 4912 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648633 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648690 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648745 4912 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.645172 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.648891 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: E1208 21:18:57.650045 4912 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.650821 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.651410 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.660562 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.667793 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.671571 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.675509 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.682057 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.693834 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.706175 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-q6mfz" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.713024 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.714634 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.718564 4912 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.722374 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.723285 4912 scope.go:117] "RemoveContainer" containerID="2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.723695 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.728218 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.730619 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.743091 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749164 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749213 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-cnibin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749257 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-etc-kubernetes\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749285 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/831b06bd-095f-439f-a166-088c2d584933-proxy-tls\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749322 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-multus\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749346 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx9wb\" (UniqueName: \"kubernetes.io/projected/935c51ff-7414-4687-be92-cda52803d7b0-kube-api-access-cx9wb\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749388 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-multus-daemon-config\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749414 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-cnibin\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749444 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/831b06bd-095f-439f-a166-088c2d584933-mcd-auth-proxy-config\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749470 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fxph\" (UniqueName: \"kubernetes.io/projected/831b06bd-095f-439f-a166-088c2d584933-kube-api-access-7fxph\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749494 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-hostroot\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749530 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-socket-dir-parent\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749556 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-conf-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749583 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-os-release\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749639 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-cni-binary-copy\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749664 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-kubelet\") pod \"multus-rp5rf\" 
(UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749687 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749712 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-k8s-cni-cncf-io\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749738 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/831b06bd-095f-439f-a166-088c2d584933-rootfs\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749762 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm968\" (UniqueName: \"kubernetes.io/projected/959add28-5508-49d7-8fe3-404acef398b0-kube-api-access-vm968\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749788 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-system-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749849 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-bin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749878 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-multus-certs\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.749944 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-netns\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.757724 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-netns\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.757948 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-binary-copy\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.757978 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.758010 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-system-cni-dir\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.758046 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-os-release\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.758242 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.758513 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.759638 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-cnibin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.759680 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-etc-kubernetes\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763206 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-multus-daemon-config\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763278 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-system-cni-dir\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763280 4912 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-socket-dir-parent\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763301 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-multus\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763779 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/959add28-5508-49d7-8fe3-404acef398b0-cni-binary-copy\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.763817 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-cnibin\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.764315 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/831b06bd-095f-439f-a166-088c2d584933-mcd-auth-proxy-config\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.764639 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-hostroot\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765025 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/831b06bd-095f-439f-a166-088c2d584933-proxy-tls\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765070 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-conf-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765108 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-os-release\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765127 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/831b06bd-095f-439f-a166-088c2d584933-rootfs\") pod \"machine-config-daemon-74dp4\" 
(UID: \"831b06bd-095f-439f-a166-088c2d584933\") " pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765146 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-kubelet\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765159 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765272 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-multus-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765293 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-var-lib-cni-bin\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765312 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-multus-certs\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765327 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-host-run-k8s-cni-cncf-io\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765342 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/935c51ff-7414-4687-be92-cda52803d7b0-os-release\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765368 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765387 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/959add28-5508-49d7-8fe3-404acef398b0-system-cni-dir\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765525 4912 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765540 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765552 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765562 4912 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765572 4912 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765582 4912 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765592 4912 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765625 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765635 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765646 4912 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765659 4912 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765668 4912 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765677 4912 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765690 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765701 4912 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765710 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765721 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765732 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765741 4912 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765750 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765762 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765775 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765785 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765795 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765804 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/935c51ff-7414-4687-be92-cda52803d7b0-cni-binary-copy\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765807 4912 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc 
kubenswrapper[4912]: I1208 21:18:57.765839 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765851 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765861 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765872 4912 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765882 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765892 4912 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765905 4912 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765916 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765926 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765937 4912 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.765948 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.783400 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.791280 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fxph\" (UniqueName: \"kubernetes.io/projected/831b06bd-095f-439f-a166-088c2d584933-kube-api-access-7fxph\") pod \"machine-config-daemon-74dp4\" (UID: \"831b06bd-095f-439f-a166-088c2d584933\") " 
pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.791811 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx9wb\" (UniqueName: \"kubernetes.io/projected/935c51ff-7414-4687-be92-cda52803d7b0-kube-api-access-cx9wb\") pod \"multus-additional-cni-plugins-9vfng\" (UID: \"935c51ff-7414-4687-be92-cda52803d7b0\") " pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.792728 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm968\" (UniqueName: \"kubernetes.io/projected/959add28-5508-49d7-8fe3-404acef398b0-kube-api-access-vm968\") pod \"multus-rp5rf\" (UID: \"959add28-5508-49d7-8fe3-404acef398b0\") " pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.798787 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.828904 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.852641 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.865717 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.875064 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7qdqq"] Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.876014 4912 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.885189 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.885497 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.885653 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.890223 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.890494 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.890629 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.890853 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.906296 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08
T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.921302 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.932977 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.935630 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.952658 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.956650 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-rp5rf" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.958726 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9vfng" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968565 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968602 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkffq\" (UniqueName: \"kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968621 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968639 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968659 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968674 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968724 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968753 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968773 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968792 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968811 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968828 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968848 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968866 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968883 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968906 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968924 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968942 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968961 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.968980 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.970368 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.980939 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:57 crc kubenswrapper[4912]: I1208 21:18:57.994098 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:58 crc kubenswrapper[4912]: W1208 21:18:58.004515 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod935c51ff_7414_4687_be92_cda52803d7b0.slice/crio-5ef482ff2ed2df816592750e432776017fb64302577210e9a2d2456053886868 WatchSource:0}: Error finding container 5ef482ff2ed2df816592750e432776017fb64302577210e9a2d2456053886868: Status 404 returned error can't find the container with id 5ef482ff2ed2df816592750e432776017fb64302577210e9a2d2456053886868 Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.013499 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.026221 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.040531 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.054599 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069743 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069887 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069914 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069938 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069956 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069975 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.069995 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070011 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070028 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070059 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070078 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070098 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070115 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070134 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070164 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070197 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkffq\" (UniqueName: \"kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070214 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070232 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070251 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070268 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070284 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070361 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.070441 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:18:59.070426603 +0000 UTC m=+20.933428676 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070978 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071380 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071400 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071470 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.070845 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071692 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071706 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071738 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071753 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071776 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071804 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071820 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071827 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071853 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071856 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071888 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.071908 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.076279 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.077525 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.090402 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.095276 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkffq\" (UniqueName: \"kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq\") pod \"ovnkube-node-7qdqq\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.170730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.170768 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.170790 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.170810 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.170901 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.170946 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:59.170933759 +0000 UTC m=+21.033935842 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171013 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171024 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171052 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171077 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:59.171070632 +0000 UTC m=+21.034072715 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171106 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171124 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:59.171118613 +0000 UTC m=+21.034120696 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171162 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171172 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171179 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 08 21:18:58 crc kubenswrapper[4912]: E1208 21:18:58.171200 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:18:59.171193905 +0000 UTC m=+21.034195988 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.196382 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:18:58 crc kubenswrapper[4912]: W1208 21:18:58.215521 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57520f45_3ab9_41ea_8a10_3fa74c02f04b.slice/crio-bb6830e62db0923b2f6703a0bc995e1562700c40e2c0d9bb156debffcf3831e8 WatchSource:0}: Error finding container bb6830e62db0923b2f6703a0bc995e1562700c40e2c0d9bb156debffcf3831e8: Status 404 returned error can't find the container with id bb6830e62db0923b2f6703a0bc995e1562700c40e2c0d9bb156debffcf3831e8
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.431308 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.432137 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.433301 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.433924 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.435000 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.435581 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.436224 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.437636 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.438359 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.439384 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.439976 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.442353 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.442934 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.443514 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.444878 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.445669 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.448820 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.449755 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.450298 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.451196 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.452338 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.453404 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.454271 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.454914 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.456736 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.457303 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.459342 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.461223 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.461852 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.462660 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.463753 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.464421 4912 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.464569 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.467379 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.468657 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.469229 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.471887 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.472820 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.473567 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.474965 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.476414 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.477111 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.477833 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.479337 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.480767 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.481457 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.482065 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.482909 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.483651 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.485129 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.485761 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.486948 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.487599 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.488268 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.489432 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.490207 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.497280 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.517487 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.534774 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.549089 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.567290 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.582411 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.596428 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.619591 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.633440 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.644889 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.644960 4912 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.644974 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a84b5549e2fddf7a070ca11d81c8153894890760a08dc58d8abb2f5fcb5e9d4b"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.647251 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir
\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.647545 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerStarted","Data":"ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.647744 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerStarted","Data":"292689df90b611a819e2a49b3ceec5c45040681bb36ee151a607041bb07f8576"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.649582 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" 
event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.649740 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.649828 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"516449e52fd58f42f32a4c2c6e6f2187f41c13398fc7e39a978ce63284015967"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.651347 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-q6mfz" event={"ID":"b73fac72-73a2-42cf-8d43-6aa187f7ba9c","Type":"ContainerStarted","Data":"6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.651493 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-q6mfz" event={"ID":"b73fac72-73a2-42cf-8d43-6aa187f7ba9c","Type":"ContainerStarted","Data":"e80a064727cce23d990a46868d46ce0e8ffb8a37a9a03234d7936fc3967f1747"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.652835 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" exitCode=0 Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.652956 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.653078 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"bb6830e62db0923b2f6703a0bc995e1562700c40e2c0d9bb156debffcf3831e8"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.654160 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e1f8caf62ac5900b824f5070f872bd0a293c254f7d779c1643a9ff66ed11c4a8"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.658946 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920" exitCode=0 Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.659107 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.659193 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" 
event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerStarted","Data":"5ef482ff2ed2df816592750e432776017fb64302577210e9a2d2456053886868"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.660400 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.661577 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.661642 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"381cf744772518578173b03e5cf9e0ede19eb2b10d9595f2950762d53672b87c"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.663484 4912 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.667267 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b"} Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.667925 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.677975 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.747741 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.793936 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.819566 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.843944 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.858074 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.873135 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.888603 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.902118 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.913908 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.932950 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.957300 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.972355 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:58 crc kubenswrapper[4912]: I1208 21:18:58.990133 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.008466 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.030522 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.053611 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.072690 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.082122 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.082403 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:01.082354711 +0000 UTC m=+22.945356794 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.090879 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.164927 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc 
kubenswrapper[4912]: I1208 21:18:59.182986 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.183070 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.183097 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.183118 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183278 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183297 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183310 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183369 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:01.183350389 +0000 UTC m=+23.046352472 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183437 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183461 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:01.183454612 +0000 UTC m=+23.046456685 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183504 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183515 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183523 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183546 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:01.183540314 +0000 UTC m=+23.046542397 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183579 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.183601 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-08 21:19:01.183595665 +0000 UTC m=+23.046597748 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.185368 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"qua
y.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.198479 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.216467 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.236564 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.252433 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.284101 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.426913 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.426987 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.427058 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.427086 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.427175 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:18:59 crc kubenswrapper[4912]: E1208 21:18:59.427308 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.672577 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a" exitCode=0 Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.672622 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696682 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696729 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696742 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696750 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696759 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.696768 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.699796 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.726573 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.742854 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.754193 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.771133 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.794076 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.810981 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.826281 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.842593 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.842784 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6vbwt"] Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.843464 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.846445 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.846689 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.846852 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.847104 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.855750 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.873563 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.890189 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.890487 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c279k\" (UniqueName: \"kubernetes.io/projected/84c31be3-897d-43a4-9d4a-6767eeaa79de-kube-api-access-c279k\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.890560 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84c31be3-897d-43a4-9d4a-6767eeaa79de-host\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.890589 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/84c31be3-897d-43a4-9d4a-6767eeaa79de-serviceca\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.906080 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.921252 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.965240 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.991225 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84c31be3-897d-43a4-9d4a-6767eeaa79de-host\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.991268 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/84c31be3-897d-43a4-9d4a-6767eeaa79de-serviceca\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.991319 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c279k\" (UniqueName: \"kubernetes.io/projected/84c31be3-897d-43a4-9d4a-6767eeaa79de-kube-api-access-c279k\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.991504 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84c31be3-897d-43a4-9d4a-6767eeaa79de-host\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:18:59 crc kubenswrapper[4912]: I1208 21:18:59.993076 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/84c31be3-897d-43a4-9d4a-6767eeaa79de-serviceca\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.001770 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:18:59Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.029608 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c279k\" (UniqueName: \"kubernetes.io/projected/84c31be3-897d-43a4-9d4a-6767eeaa79de-kube-api-access-c279k\") pod \"node-ca-6vbwt\" (UID: \"84c31be3-897d-43a4-9d4a-6767eeaa79de\") " pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.059523 4912 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 
2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.097458 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.140809 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.159333 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6vbwt" Dec 08 21:19:00 crc kubenswrapper[4912]: W1208 21:19:00.174447 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84c31be3_897d_43a4_9d4a_6767eeaa79de.slice/crio-6f1e6de312e4c57aa8f19b4e74b0d3f1aa24798797b8c9845d25ef1a6c5d0808 WatchSource:0}: Error finding container 6f1e6de312e4c57aa8f19b4e74b0d3f1aa24798797b8c9845d25ef1a6c5d0808: Status 404 returned error can't find the container with id 6f1e6de312e4c57aa8f19b4e74b0d3f1aa24798797b8c9845d25ef1a6c5d0808 Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.185985 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/stat
ic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and 
discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.224413 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.261541 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.304198 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.343184 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.384959 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.425371 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.465482 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.702071 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31" exitCode=0 Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.702179 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31"} Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.704967 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6vbwt" event={"ID":"84c31be3-897d-43a4-9d4a-6767eeaa79de","Type":"ContainerStarted","Data":"a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2"} Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.705022 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6vbwt" event={"ID":"84c31be3-897d-43a4-9d4a-6767eeaa79de","Type":"ContainerStarted","Data":"6f1e6de312e4c57aa8f19b4e74b0d3f1aa24798797b8c9845d25ef1a6c5d0808"} Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.718896 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.736916 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.756283 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.774937 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.791431 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.808444 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.825335 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.839095 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.855871 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.874487 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.900862 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.942056 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:00 crc kubenswrapper[4912]: I1208 21:19:00.986215 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:00Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.028529 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.063930 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.103072 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.103168 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.103378 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.103307036 +0000 UTC m=+26.966309129 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.143096 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.201970 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.204381 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.204454 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.204477 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.204498 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204617 4912 
configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204646 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204663 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204668 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204684 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204724 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.204695274 +0000 UTC m=+27.067697377 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204734 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204752 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204783 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.204757705 +0000 UTC m=+27.067759798 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204677 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204832 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.204803927 +0000 UTC m=+27.067806010 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.204881 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:05.204867788 +0000 UTC m=+27.067869991 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.225657 4912 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.229315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.229383 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.229398 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.229581 4912 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.230759 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 
2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.273800 4912 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.274657 4912 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.276686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.276730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.276739 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.276757 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.276773 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.298325 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.302795 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.302831 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.302839 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.302853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.302864 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.303822 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.318845 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.327778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.327825 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.327837 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.327858 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.327871 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.346340 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.348317 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.355263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.355344 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.355382 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.355420 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.355448 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.369054 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[... image list elided: byte-identical to the list in the preceding status patch attempt ...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.374530 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.374628 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.374659 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.374698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.374723 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.381906 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.392740 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[... image list elided: byte-identical to the list in the first status patch attempt above ...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.392917 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.395078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.395114 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.395124 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.395138 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.395149 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.419313 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.427650 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.427722 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.427651 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.427794 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.427858 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:01 crc kubenswrapper[4912]: E1208 21:19:01.427920 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.458697 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.497358 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.497397 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.497407 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.497421 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.497429 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.503548 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.538852 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.581735 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.600231 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.600274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.600286 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.600307 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.600323 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.623258 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.710981 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.711046 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.711059 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.711076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.711088 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.714896 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949" exitCode=0 Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.714976 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.716466 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.737773 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.754793 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.770705 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.774917 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.787235 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.788371 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.804497 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.815555 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.816372 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.816468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.816603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.816697 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.842384 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.878952 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.919010 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.919084 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.919095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.919109 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.919120 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:01Z","lastTransitionTime":"2025-12-08T21:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.926881 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:
19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:01 crc kubenswrapper[4912]: I1208 21:19:01.960583 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.000479 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:01Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.022086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.022122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.022132 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.022147 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.022157 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.040134 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.089318 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.119406 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b827994
88ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.124640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.124679 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.124688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.124705 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.124716 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.165187 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.202077 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.227258 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.227319 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.227336 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.227356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.227369 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.239304 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.282407 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.320759 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.329990 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.330048 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.330060 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.330076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.330091 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.362852 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:
19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.403195 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.432700 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.432748 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.432759 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.432780 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.432793 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.439724 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.482027 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.525090 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.538901 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.538959 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.538975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.538997 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.539010 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.561842 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.601638 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.640313 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.641740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.641787 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.641801 4912 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.641825 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.641840 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.680807 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.722769 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 
2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.724199 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.726837 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4" exitCode=0 Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.727613 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.748116 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.748165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.748176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.748197 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.748212 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.773026 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.800773 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.838521 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.855086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.855135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.855146 4912 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.855170 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.855183 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.879650 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.921531 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 
2025-08-24T17:21:41Z" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.958165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.958218 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.958231 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.958254 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.958270 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:02Z","lastTransitionTime":"2025-12-08T21:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:02 crc kubenswrapper[4912]: I1208 21:19:02.964358 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877
441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.002210 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:02Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.042086 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.068283 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.068323 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.068337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.068354 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.068365 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.095368 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.126854 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.162368 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.171090 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.171154 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.171166 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.171192 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.171205 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.202670 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.239834 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.274065 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.274126 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.274141 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.274165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.274178 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.281612 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.328132 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.362447 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.376549 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.376606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.376615 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.376629 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.376638 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.402953 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.426772 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:03 crc kubenswrapper[4912]: E1208 21:19:03.426925 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.427004 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:03 crc kubenswrapper[4912]: E1208 21:19:03.427095 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.427161 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:03 crc kubenswrapper[4912]: E1208 21:19:03.427223 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.479436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.479500 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.479515 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.479538 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.479554 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.582472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.582845 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.582918 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.582998 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.583088 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.686434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.686862 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.686947 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.687043 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.687317 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.734767 4912 generic.go:334] "Generic (PLEG): container finished" podID="935c51ff-7414-4687-be92-cda52803d7b0" containerID="8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db" exitCode=0 Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.735142 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerDied","Data":"8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.751256 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.768801 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.780022 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.790547 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.790599 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.790616 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.790637 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.790650 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.793105 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.812462 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044
f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.830099 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.844403 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.859641 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.877867 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z 
is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.894002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.894078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.894091 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.894114 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.894128 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.897380 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.916848 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.931403 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.948319 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.964409 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
8T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.998396 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.998454 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.998466 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.998490 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:03 crc kubenswrapper[4912]: I1208 21:19:03.998512 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:03Z","lastTransitionTime":"2025-12-08T21:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.000820 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:03Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.101410 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.101469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.101483 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.101511 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.101526 4912 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.204475 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.204539 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.204550 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.204573 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.204593 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.307491 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.307835 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.307912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.307979 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.308059 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.411920 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.411981 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.412005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.412077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.412105 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.515115 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.515168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.515179 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.515199 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.515212 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.618073 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.618123 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.618135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.618154 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.618168 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.722167 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.722237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.722257 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.722289 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.722312 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.825063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.825106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.825132 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.825153 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.825171 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.927815 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.927878 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.927891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.927908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:04 crc kubenswrapper[4912]: I1208 21:19:04.927919 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:04Z","lastTransitionTime":"2025-12-08T21:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.030395 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.030439 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.030447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.030461 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.030474 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.133986 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.134049 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.134062 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.134086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.134100 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.145169 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.145413 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.145384058 +0000 UTC m=+35.008386141 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.236393 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.236692 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.236792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.236880 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.236958 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.245822 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.245968 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.246109 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.246229 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246011 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246432 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246551 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246689 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.246669243 +0000 UTC m=+35.109671346 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246061 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246231 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246987 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.247011 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246305 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.246891 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.246880219 +0000 UTC m=+35.109882312 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.247116 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.247101384 +0000 UTC m=+35.110103467 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.247130 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.247122515 +0000 UTC m=+35.110124598 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.339948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.339991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.340003 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.340020 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.340045 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.427443 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.427519 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.427443 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.427736 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.427864 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:05 crc kubenswrapper[4912]: E1208 21:19:05.428111 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.443060 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.443105 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.443115 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.443134 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.443148 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.546006 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.546119 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.546145 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.546179 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.546211 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.649640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.649726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.649788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.649821 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.649841 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.749667 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.750193 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.753332 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.753413 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.753440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.753474 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.753501 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.761840 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" event={"ID":"935c51ff-7414-4687-be92-cda52803d7b0","Type":"ContainerStarted","Data":"3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.775550 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.783899 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.796483 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.814954 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.833308 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862199 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862225 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862256 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862279 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.862803 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa
2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.886755 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.904821 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.923281 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.938470 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.954367 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.965568 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.965626 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.965638 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.965660 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.965674 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:05Z","lastTransitionTime":"2025-12-08T21:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.970705 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:05 crc kubenswrapper[4912]: I1208 21:19:05.986759 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.000426 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:05Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.020475 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\
\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.034526 4912 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d6
20e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.049026 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.061354 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.067971 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.068000 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.068010 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.068050 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.068070 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.077312 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.095209 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lo
g/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.108624 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.133759 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.149293 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.162531 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.171119 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.171173 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.171183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.171204 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc 
kubenswrapper[4912]: I1208 21:19:06.171215 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.179221 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.192587 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.204364 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.219282 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.232341 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.245801 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.264160 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.273842 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.273889 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.273904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.273920 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.273932 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.376985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.377027 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.377052 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.377072 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.377082 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.479778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.479851 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.479870 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.479902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.479920 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.583253 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.583303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.583317 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.583337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.583350 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.690025 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.690818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.690905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.690982 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.691077 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.765928 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.765976 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.791460 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.794140 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.794181 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.794191 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.794207 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.794217 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.808637 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.827548 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.854978 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.870427 4912 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d6
20e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.886155 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.897068 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.897131 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.897150 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.897177 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.897189 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:06Z","lastTransitionTime":"2025-12-08T21:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.900841 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.914398 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.929150 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.952229 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.969870 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.985800 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:06 crc kubenswrapper[4912]: I1208 21:19:06.996930 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:06Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.000254 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.000299 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.000312 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.000335 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.000351 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.014028 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:07Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.030309 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:07Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.047072 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:07Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.104991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.105070 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.105085 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.105106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.105121 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.207833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.207875 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.207885 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.207901 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.207912 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.311602 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.311646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.311656 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.311674 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.311687 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.414530 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.414577 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.414586 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.414605 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.414616 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.426904 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.426922 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.426944 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:07 crc kubenswrapper[4912]: E1208 21:19:07.427157 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:07 crc kubenswrapper[4912]: E1208 21:19:07.427257 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:07 crc kubenswrapper[4912]: E1208 21:19:07.427348 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.518594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.518690 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.518717 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.518806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.518839 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.621684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.621746 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.621758 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.621778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.621794 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.729677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.729725 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.729737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.729754 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.729767 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.832789 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.832843 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.832853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.832875 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.832890 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.936180 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.936239 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.936251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.936273 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:07 crc kubenswrapper[4912]: I1208 21:19:07.936289 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:07Z","lastTransitionTime":"2025-12-08T21:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.040508 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.040558 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.040571 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.040590 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.040602 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.143615 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.143705 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.143731 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.143771 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.143817 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.247014 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.247345 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.247488 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.247596 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.247689 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.350525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.350773 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.350840 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.350904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.350994 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.442629 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.453796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.453895 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.453926 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.453963 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.453988 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.456994 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.471743 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.485752 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.500140 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.521889 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.535997 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.557008 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.557106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.557118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.557137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.557171 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.558162 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.579697 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.594771 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.606823 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.619673 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.631520 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.645984 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.657058 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.660223 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.660259 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.660271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.660292 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.660303 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.762884 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.762961 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.762975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.763001 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.763015 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.775062 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/0.log" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.777454 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80" exitCode=1 Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.777503 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.778185 4912 scope.go:117] "RemoveContainer" containerID="db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.805852 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.823909 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.836607 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.866456 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.866519 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.866538 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.866567 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc 
kubenswrapper[4912]: I1208 21:19:08.866590 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.878788 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.901524 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.922642 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.940087 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.958554 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.969359 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.969419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.969429 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.969452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.969465 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:08Z","lastTransitionTime":"2025-12-08T21:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.972912 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:08 crc kubenswrapper[4912]: I1208 21:19:08.988021 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.003235 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:09Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.015630 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:09Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.028680 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:09Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.041723 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:09Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.065490 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58d
be1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93
ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:09Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.072376 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.072417 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.072432 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.072450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.072463 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.174983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.175052 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.175066 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.175082 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.175093 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.277621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.277669 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.277681 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.277698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.277708 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.379972 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.380014 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.380023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.380062 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.380072 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.427655 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:09 crc kubenswrapper[4912]: E1208 21:19:09.427806 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.428004 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.428020 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:09 crc kubenswrapper[4912]: E1208 21:19:09.428305 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:09 crc kubenswrapper[4912]: E1208 21:19:09.428419 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.482368 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.482422 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.482430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.482447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.482458 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.584899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.584951 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.584963 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.584976 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.584993 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.687860 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.687940 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.687958 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.687973 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.688007 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.791179 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.791241 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.791253 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.791280 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.791296 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.893823 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.893885 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.893903 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.893925 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.893940 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.997339 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.997404 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.997419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.997445 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:09 crc kubenswrapper[4912]: I1208 21:19:09.997461 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:09Z","lastTransitionTime":"2025-12-08T21:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.103235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.103319 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.103334 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.103374 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.103389 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.206484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.206542 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.206552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.206568 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.206578 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.310357 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.310447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.310464 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.310516 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.310535 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.414740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.414833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.414859 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.414898 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.414922 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.519689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.519772 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.519784 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.519803 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.519815 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.623045 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.623090 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.623106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.623126 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.623140 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.711496 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg"] Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.712295 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.715757 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.716211 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.727262 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.727317 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.727384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.727430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.727447 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.734104 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.749154 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.767521 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.782875 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.787597 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/0.log" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.791723 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.792142 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.798304 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.810449 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5986eadb-31e4-483f-ab2e-e427be240400-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.810511 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.810543 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfj2x\" (UniqueName: \"kubernetes.io/projected/5986eadb-31e4-483f-ab2e-e427be240400-kube-api-access-nfj2x\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.810569 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.820756 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\
\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] 
Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.1
26.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.829923 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.829969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.830015 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.830064 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.830082 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.834681 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.848143 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.862490 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.876410 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://
0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.890204 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.903303 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.911505 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5986eadb-31e4-483f-ab2e-e427be240400-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.911600 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.911627 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfj2x\" (UniqueName: \"kubernetes.io/projected/5986eadb-31e4-483f-ab2e-e427be240400-kube-api-access-nfj2x\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.911670 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.912726 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.913123 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5986eadb-31e4-483f-ab2e-e427be240400-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.927510 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5986eadb-31e4-483f-ab2e-e427be240400-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.930129 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.934223 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.934285 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.934306 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.934338 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.934354 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:10Z","lastTransitionTime":"2025-12-08T21:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.940146 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfj2x\" (UniqueName: \"kubernetes.io/projected/5986eadb-31e4-483f-ab2e-e427be240400-kube-api-access-nfj2x\") pod \"ovnkube-control-plane-749d76644c-hdvlg\" (UID: \"5986eadb-31e4-483f-ab2e-e427be240400\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.947086 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.961775 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.975382 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.987808 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:10 crc kubenswrapper[4912]: I1208 21:19:10.999831 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:10Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.010463 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.026747 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.026896 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.037261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.037336 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.037347 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.037363 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.037375 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: W1208 21:19:11.050236 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5986eadb_31e4_483f_ab2e_e427be240400.slice/crio-eb6e36d8c0de2e9750472302c1b07ff7bc9c5f671ef5d1702da5c0eb9aab89a9 WatchSource:0}: Error finding container eb6e36d8c0de2e9750472302c1b07ff7bc9c5f671ef5d1702da5c0eb9aab89a9: Status 404 returned error can't find the container with id eb6e36d8c0de2e9750472302c1b07ff7bc9c5f671ef5d1702da5c0eb9aab89a9 Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.061004 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.078790 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.093499 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.112961 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.139377 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.140780 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.140905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.140978 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.141081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.141171 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.167915 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.189896 4912 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.205707 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.219167 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.241981 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.243892 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.243939 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.243950 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.243969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.243980 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.255255 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.270087 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.346696 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.346742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.346751 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.346771 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.346787 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.427176 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.427263 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.427321 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.427393 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.427440 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.427483 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.449793 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.449848 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.449863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.449881 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.449894 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.459887 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-lhjln"] Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.460427 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.460500 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.482084 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c
64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.495484 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.507332 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.520764 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.521089 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnbm7\" (UniqueName: \"kubernetes.io/projected/6f259abd-9b12-458f-975d-68996ae1265c-kube-api-access-tnbm7\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.521938 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.533783 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.545195 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.552801 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.552843 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.552882 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.552899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.552912 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.557504 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z
\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.572543 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.585489 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.596691 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.610147 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.622181 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.622223 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnbm7\" (UniqueName: \"kubernetes.io/projected/6f259abd-9b12-458f-975d-68996ae1265c-kube-api-access-tnbm7\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.622398 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.622560 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:12.122531502 +0000 UTC m=+33.985533585 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.624349 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.636013 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.638154 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnbm7\" 
(UniqueName: \"kubernetes.io/projected/6f259abd-9b12-458f-975d-68996ae1265c-kube-api-access-tnbm7\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.652619 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\"
:{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.655423 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.655932 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.655962 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.655996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.656009 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.668080 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.681764 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.703490 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.729688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.729752 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.729768 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.729797 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.729817 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.744096 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.748434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.748525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.748545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.748568 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.748583 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.767417 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.774012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.774064 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.774074 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.774092 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.774103 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.788483 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.792619 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.792850 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.792948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.793115 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.793236 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.799501 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" event={"ID":"5986eadb-31e4-483f-ab2e-e427be240400","Type":"ContainerStarted","Data":"ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.799578 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" event={"ID":"5986eadb-31e4-483f-ab2e-e427be240400","Type":"ContainerStarted","Data":"eb6e36d8c0de2e9750472302c1b07ff7bc9c5f671ef5d1702da5c0eb9aab89a9"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.801487 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/1.log" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.802014 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/0.log" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.803770 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6" exitCode=1 Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.803819 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.803890 4912 scope.go:117] "RemoveContainer" containerID="db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.804798 4912 scope.go:117] "RemoveContainer" containerID="a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.805020 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.813771 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch 
status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b1
77c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.818867 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.832431 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.832474 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.832486 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.832506 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.832519 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.844401 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: E1208 21:19:11.844547 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846521 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846531 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846564 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.846772 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.860268 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.878573 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.889579 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.904381 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.931117 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21
009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, 
Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.
168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.946102 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.949182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.949228 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.949237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.949259 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.949271 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:11Z","lastTransitionTime":"2025-12-08T21:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.961218 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.976489 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:11 crc kubenswrapper[4912]: I1208 21:19:11.991290 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:11Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.004518 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.016938 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.038880 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052087 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052120 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052130 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052147 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052157 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.052432 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.064827 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.080603 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.133313 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:12 crc kubenswrapper[4912]: E1208 21:19:12.133528 4912 secret.go:188] 
Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:12 crc kubenswrapper[4912]: E1208 21:19:12.133624 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:13.133598467 +0000 UTC m=+34.996600560 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.154429 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.154478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.154498 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.154525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.154541 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.214103 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.231593 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d0
0df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.243465 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b8
9c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.254517 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.257819 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.257857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.257870 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.257894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.257911 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.268532 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.279682 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.298143 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.312240 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.335309 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.350521 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.360450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.360485 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.360496 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.360519 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.360532 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.367493 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.381347 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.392309 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.405922 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.418050 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.439532 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.453860 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.463009 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.463078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.463088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.463103 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.463112 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.468094 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.566713 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.566758 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.566773 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.566795 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.566809 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.668932 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.668980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.668993 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.669013 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.669025 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.771485 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.771548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.771568 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.771598 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.771615 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.812371 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" event={"ID":"5986eadb-31e4-483f-ab2e-e427be240400","Type":"ContainerStarted","Data":"00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.815875 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/1.log" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.822912 4912 scope.go:117] "RemoveContainer" containerID="a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6" Dec 08 21:19:12 crc kubenswrapper[4912]: E1208 21:19:12.823250 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.834977 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.851635 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.868747 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.873626 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.873698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.873728 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.873767 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.873795 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.884466 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.899461 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.914026 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.927798 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.951302 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db87504f412092c74804d2cc3a73226f35fcc58dbe1fc31f9650a5eb98a84d80\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:08Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199011 6199 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199093 6199 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199421 6199 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:07.199598 6199 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:07.199659 6199 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 21:19:07.199706 6199 factory.go:656] Stopping watch factory\\\\nI1208 21:19:07.199651 6199 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1208 21:19:07.199726 6199 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 21:19:07.199728 6199 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.967777 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b8
9c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.976897 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.976937 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.976954 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:12 crc 
kubenswrapper[4912]: I1208 21:19:12.976981 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.977000 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:12Z","lastTransitionTime":"2025-12-08T21:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.983517 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:12 crc kubenswrapper[4912]: I1208 21:19:12.998984 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.016534 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.029186 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.040567 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.062202 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.076762 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.083831 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.083889 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.083902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.083928 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.083942 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.090027 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.102871 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.112883 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.124193 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 
21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.144181 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.144418 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.144547 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:15.14451446 +0000 UTC m=+37.007516713 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.147213 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a56
46fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.163860 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.178389 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.186827 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.186908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.186923 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.186942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.186958 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.192714 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.203553 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.216977 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.230163 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.244982 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.245295 4912 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.245256241 +0000 UTC m=+51.108258344 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.245596 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a
12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.286335 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.289287 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.289337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.289346 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.289368 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.289380 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.324596 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.346356 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.346451 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.346602 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.346653 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346683 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346748 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346775 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346812 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346872 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.346839664 +0000 UTC m=+51.209841777 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346921 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.346905356 +0000 UTC m=+51.209907479 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.346967 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.347029 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.347002568 +0000 UTC m=+51.210004681 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.347190 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.347222 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.347245 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.347310 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:19:29.347288656 +0000 UTC m=+51.210290779 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.367856 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"
},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 
6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.392452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.392509 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.392525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.392547 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.392563 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.404317 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.427743 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.427793 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.427850 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.427945 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.427995 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.428117 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.428225 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:13 crc kubenswrapper[4912]: E1208 21:19:13.428380 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.448723 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.490482 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.495818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.495908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.495938 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.495971 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.495994 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.599011 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.599128 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.599148 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.599192 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.599218 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.702585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.702654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.702669 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.702694 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.702711 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.806604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.806672 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.806693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.806726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.806748 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.910187 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.910246 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.910257 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.910277 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:13 crc kubenswrapper[4912]: I1208 21:19:13.910290 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:13Z","lastTransitionTime":"2025-12-08T21:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.013403 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.013455 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.013465 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.013484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.013496 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.116457 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.116516 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.116529 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.116552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.116569 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.219896 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.219972 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.219992 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.220023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.220081 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.324208 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.324288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.324299 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.324317 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.324327 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.428017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.428089 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.428100 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.428121 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.428134 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.530970 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.531055 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.531077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.531102 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.531120 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.633521 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.633642 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.633664 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.633688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.633703 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.735636 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.735678 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.735695 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.735709 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.735719 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.837857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.837889 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.837897 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.837909 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.837920 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.961555 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.961612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.961622 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.961635 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:14 crc kubenswrapper[4912]: I1208 21:19:14.961648 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:14Z","lastTransitionTime":"2025-12-08T21:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.064418 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.064454 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.064464 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.064478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.064488 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.166496 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.166714 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.166855 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:19.166820078 +0000 UTC m=+41.029822311 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.168007 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.168076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.168108 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.168125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.168136 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.273405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.273485 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.273507 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.273541 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.273565 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.377657 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.377714 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.377725 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.377742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.377752 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.427582 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.427583 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.427753 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.427737 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.427897 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.428204 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.428392 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:15 crc kubenswrapper[4912]: E1208 21:19:15.428599 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.481309 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.481368 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.481383 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.481404 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.481420 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.584680 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.585169 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.585284 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.585427 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.585545 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.689244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.689297 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.689306 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.689522 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.689549 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.792182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.792245 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.792258 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.792280 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.792296 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.895655 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.895727 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.895739 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.895773 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.895783 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.998719 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.998769 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.998782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.998799 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:15 crc kubenswrapper[4912]: I1208 21:19:15.998811 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:15Z","lastTransitionTime":"2025-12-08T21:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.102594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.102660 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.102673 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.102698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.102716 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.207894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.207964 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.207980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.208002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.208015 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.311130 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.311187 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.311200 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.311222 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.311236 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.414371 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.414403 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.414413 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.414426 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.414434 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.517791 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.517846 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.517859 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.517879 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.517891 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.622862 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.622904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.622912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.622929 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.622938 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.726914 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.726973 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.726983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.727003 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.727015 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.830408 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.830466 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.830476 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.830498 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.830510 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.933426 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.933500 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.933512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.933535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:16 crc kubenswrapper[4912]: I1208 21:19:16.933550 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:16Z","lastTransitionTime":"2025-12-08T21:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.036233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.036285 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.036300 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.036318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.036347 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.140208 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.140299 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.140324 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.140362 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.140387 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.244486 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.244559 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.244577 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.244640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.244671 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.347828 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.347886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.348403 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.348427 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.348684 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.427834 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.427903 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:17 crc kubenswrapper[4912]: E1208 21:19:17.427998 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.428107 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.428507 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:17 crc kubenswrapper[4912]: E1208 21:19:17.428659 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:17 crc kubenswrapper[4912]: E1208 21:19:17.428686 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:17 crc kubenswrapper[4912]: E1208 21:19:17.428515 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.452654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.454131 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.454164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.454194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.454216 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.557095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.557153 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.557165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.557184 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.557199 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.660198 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.660246 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.660256 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.660275 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.660287 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.768593 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.768910 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.768925 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.768942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.768952 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.872716 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.873102 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.873338 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.873542 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.873657 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.977174 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.977603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.977836 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.978183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:17 crc kubenswrapper[4912]: I1208 21:19:17.978387 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:17Z","lastTransitionTime":"2025-12-08T21:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.081222 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.081703 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.081868 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.082013 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.082181 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.185510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.185871 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.186076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.186183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.186248 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.289231 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.289287 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.289303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.289329 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.289344 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.393267 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.394466 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.394650 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.394907 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.395180 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.444102 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.460788 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.482103 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.496560 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.498404 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.498453 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.498469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.498493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.498518 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.511608 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.532100 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.550012 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.571417 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.586694 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.600608 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.601933 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.601969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.601983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.602006 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.602022 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.624488 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.643933 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.656904 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.669669 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.690574 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.704671 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.704708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.704718 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.704732 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.704743 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.705344 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.720742 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:18Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.808083 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.808123 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.808131 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.808144 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.808155 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.910872 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.910915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.910927 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.910945 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:18 crc kubenswrapper[4912]: I1208 21:19:18.910957 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:18Z","lastTransitionTime":"2025-12-08T21:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.013732 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.013775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.013785 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.013799 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.013809 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.116731 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.116808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.116861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.116894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.116910 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.213998 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.214195 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.214278 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:27.214260496 +0000 UTC m=+49.077262579 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.223641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.223768 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.223788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.223818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.223840 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.326967 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.327019 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.327030 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.327068 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.327082 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.426854 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.426990 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.427122 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.427249 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.427320 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.427275 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.427496 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:19:19 crc kubenswrapper[4912]: E1208 21:19:19.427753 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.429304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.429337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.429351 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.429370 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.429383 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.531695 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.531747 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.531756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.531778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.531790 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.635094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.635176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.635202 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.635237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.635263 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.739590 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.739666 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.739684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.739718 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.739764 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.842897 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.842951 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.842962 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.842981 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.842992 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.946581 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.946642 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.946662 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.946686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:19 crc kubenswrapper[4912]: I1208 21:19:19.946705 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:19Z","lastTransitionTime":"2025-12-08T21:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.050214 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.050262 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.050272 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.050298 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.050309 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.152663 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.152720 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.152734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.152755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.152769 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.255233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.255295 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.255304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.255317 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.255329 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.357918 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.358607 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.358627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.358654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.358668 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.461957 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.462012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.462025 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.462076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.462091 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.564698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.564750 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.564764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.564784 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.564800 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.667942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.668005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.668023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.668076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.668091 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.771322 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.771366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.771378 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.771396 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.771408 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.873931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.874002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.874016 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.874086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.874102 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.977465 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.977547 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.977561 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.977582 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:20 crc kubenswrapper[4912]: I1208 21:19:20.977602 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:20Z","lastTransitionTime":"2025-12-08T21:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.083161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.083298 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.083330 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.083368 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.083393 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.187890 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.187994 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.188017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.188084 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.188112 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.291629 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.291728 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.291755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.291812 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.291841 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.395374 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.395440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.395452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.395474 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.395488 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.427913 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.427971 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.428001 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.427941 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:19:21 crc kubenswrapper[4912]: E1208 21:19:21.428170 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:19:21 crc kubenswrapper[4912]: E1208 21:19:21.428360 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:19:21 crc kubenswrapper[4912]: E1208 21:19:21.428580 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:19:21 crc kubenswrapper[4912]: E1208 21:19:21.428730 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.498531 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.498589 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.498610 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.498636 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.498653 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.602612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.602662 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.602676 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.602698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.602711 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.705700 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.705768 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.705786 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.705814 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.705836 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.810155 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.810235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.810296 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.810341 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.810361 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.913771 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.913833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.913848 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.913871 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:21 crc kubenswrapper[4912]: I1208 21:19:21.913890 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:21Z","lastTransitionTime":"2025-12-08T21:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.018389 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.018431 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.018442 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.018462 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.018480 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.120930 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.120975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.120985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.121006 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.121019 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.161800 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.161839 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.161848 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.161863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.161874 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.181150 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:22Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.185559 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.185606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.185616 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.185634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.185652 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.199110 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:22Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.204200 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.204222 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.204230 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.204244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.204258 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.233778 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:22Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.238570 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.238632 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.238654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.238676 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.238689 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.255704 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:22Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.259861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.259892 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.259905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.259928 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.259942 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.274658 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:22Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:22 crc kubenswrapper[4912]: E1208 21:19:22.274780 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.276438 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.276469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.276480 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.276499 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.276512 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.379908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.379954 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.379964 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.379984 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.379995 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.482696 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.482762 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.482776 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.482798 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.482811 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.586486 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.586551 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.586572 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.586608 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.586634 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.689737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.689826 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.689860 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.689896 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.689920 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.795104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.795194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.795221 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.795255 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.795278 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.899833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.899894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.899916 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.899948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:22 crc kubenswrapper[4912]: I1208 21:19:22.899970 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:22Z","lastTransitionTime":"2025-12-08T21:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.003576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.003650 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.003672 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.003704 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.003728 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.107234 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.107289 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.107300 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.107332 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.107343 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.212529 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.212599 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.212612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.212636 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.212649 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.316639 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.316713 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.316727 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.316748 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.316763 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.420275 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.420377 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.420402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.420440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.420466 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.427865 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.428214 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:23 crc kubenswrapper[4912]: E1208 21:19:23.428498 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.428370 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.428373 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:19:23 crc kubenswrapper[4912]: E1208 21:19:23.428819 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:19:23 crc kubenswrapper[4912]: E1208 21:19:23.428954 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:19:23 crc kubenswrapper[4912]: E1208 21:19:23.429274 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.523929 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.524444 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.524597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.524752 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.524913 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.628684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.628740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.628749 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.628776 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.628787 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.731804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.731866 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.731879 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.731913 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.731927 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.835287 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.835354 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.835372 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.835399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.835418 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.938116 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.938178 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.938202 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.938237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:23 crc kubenswrapper[4912]: I1208 21:19:23.938260 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:23Z","lastTransitionTime":"2025-12-08T21:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.042400 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.042493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.042519 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.042560 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.042587 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.145803 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.146063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.146075 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.146092 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.146100 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.249799 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.249865 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.249882 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.249916 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.249937 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.353756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.353839 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.353861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.353892 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.353910 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.429870 4912 scope.go:117] "RemoveContainer" containerID="a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.457216 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.457625 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.457788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.457927 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.458087 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.562204 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.562280 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.562304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.562337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.562361 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.666383 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.666444 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.666463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.666491 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.666508 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.769616 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.769661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.769670 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.769687 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.769697 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.869753 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/1.log"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872074 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872107 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872162 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.872684 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427"}
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.873869 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq"
Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.900100 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.921134 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.940735 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.957391 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.971709 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.974740 4912 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.974788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.974800 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.974822 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.974833 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:24Z","lastTransitionTime":"2025-12-08T21:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.986670 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:24 crc kubenswrapper[4912]: I1208 21:19:24.999835 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:24Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.010749 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.024626 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.034884 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.047377 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.061586 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.073904 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.076985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.077045 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.077058 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.077077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.077086 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.088113 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.100128 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.115027 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.137762 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f
24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.179572 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.179632 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.179645 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.179669 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.179683 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.282646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.282691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.282702 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.282723 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.282733 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.386334 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.386394 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.386407 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.386428 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.386445 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.427424 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.427477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.427517 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:25 crc kubenswrapper[4912]: E1208 21:19:25.427672 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.427846 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:25 crc kubenswrapper[4912]: E1208 21:19:25.427974 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:25 crc kubenswrapper[4912]: E1208 21:19:25.428025 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:25 crc kubenswrapper[4912]: E1208 21:19:25.428111 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.488604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.488671 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.488698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.488722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.488738 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.592867 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.592937 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.592956 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.592984 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.593001 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.696890 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.696961 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.696985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.697021 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.697095 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.800838 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.800897 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.800911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.800935 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.800948 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.878780 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/2.log" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.879733 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/1.log" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.884134 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" exitCode=1 Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.884183 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.884234 4912 scope.go:117] "RemoveContainer" containerID="a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.885207 4912 scope.go:117] "RemoveContainer" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" Dec 08 21:19:25 crc kubenswrapper[4912]: E1208 21:19:25.885576 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.903490 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.904092 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.904122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.904131 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.904147 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.904158 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:25Z","lastTransitionTime":"2025-12-08T21:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.918961 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.934210 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.948641 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.961434 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:25 crc kubenswrapper[4912]: I1208 21:19:25.975242 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 
21:19:26 crc kubenswrapper[4912]: I1208 21:19:25.999922 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:25Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.006708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.006774 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.006785 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.006802 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.006814 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.014729 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.026994 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.039601 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.053054 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.063209 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.078611 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.094841 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.107569 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.109715 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.109762 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.109775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.109792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.109802 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.130504 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f
24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4ac1bc8276aed037c660595e023b62898a4be21009483537d6da6b92268b1b6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"21:19:11.546990 6342 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547000 6342 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nI1208 21:19:11.547015 6342 services_controller.go:451] Built service openshift-network-console/networking-console-plugin cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.246\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1208 21:19:11.547064 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting 
up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.148739 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.213345 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.213414 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.213433 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.213468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.213489 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.317206 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.317271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.317289 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.317315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.317333 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.421670 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.421733 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.421746 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.421775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.421790 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.524997 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.525097 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.525111 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.525135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.525151 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.627992 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.628030 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.628059 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.628077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.628087 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.731067 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.731121 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.731135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.731156 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.731172 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.834378 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.834438 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.834448 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.834477 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.834490 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.889917 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/2.log" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.894728 4912 scope.go:117] "RemoveContainer" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" Dec 08 21:19:26 crc kubenswrapper[4912]: E1208 21:19:26.895193 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.909862 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.922410 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.933131 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.937186 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.937221 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.937233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.937252 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.937265 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:26Z","lastTransitionTime":"2025-12-08T21:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.948325 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.961193 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.974437 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:26 crc kubenswrapper[4912]: I1208 21:19:26.995060 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f
24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:26Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.010281 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.025434 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.038573 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.040419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.040479 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.040493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.040516 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.040530 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.052286 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.063461 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.075842 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-co
nfig/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.098904 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:
18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\
\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.111632 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.124670 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.137974 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:27Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.142845 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.143234 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.143344 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.143501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.143627 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.220127 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.220321 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.220404 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:19:43.220381386 +0000 UTC m=+65.083383479 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.246627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.246978 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.247165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.247254 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.247344 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.350436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.350874 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.351092 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.351437 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.351660 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.427720 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.427822 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.427905 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.427986 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.427720 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.428179 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.427720 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:27 crc kubenswrapper[4912]: E1208 21:19:27.428361 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.454781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.455076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.455210 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.455304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.455377 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.558498 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.558538 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.558549 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.558576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.558592 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.662657 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.662730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.662749 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.662779 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.662804 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.767927 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.767985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.768004 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.768068 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.768087 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.871878 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.871949 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.871966 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.872342 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.872377 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.976625 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.976693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.976710 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.976737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:27 crc kubenswrapper[4912]: I1208 21:19:27.976756 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:27Z","lastTransitionTime":"2025-12-08T21:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.081810 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.081905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.081935 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.081975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.081999 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.185749 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.185802 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.185818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.185847 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.185866 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.288735 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.288792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.288806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.288827 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.288840 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.392252 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.392310 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.392325 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.392344 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.392356 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.442801 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.457072 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.474613 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.488700 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.495153 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.495217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.495234 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.495263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.495282 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.509679 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.525990 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.543411 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.569593 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.588383 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.599534 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.599574 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.599586 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.599607 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.599620 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.605023 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.621168 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.633977 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.642900 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.652755 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 
21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.702293 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.702350 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.702362 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.702382 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.702395 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.721818 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.739485 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.753258 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:28Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.805549 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.805602 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.805614 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.805634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.805645 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.908389 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.908461 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.908480 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.908508 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:28 crc kubenswrapper[4912]: I1208 21:19:28.908527 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:28Z","lastTransitionTime":"2025-12-08T21:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.012149 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.012243 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.012286 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.012322 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.012348 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.115539 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.115617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.115636 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.115662 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.115681 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.218770 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.218833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.218852 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.218884 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.218901 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.249475 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.249670 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:01.24964511 +0000 UTC m=+83.112647203 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.322472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.322565 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.322588 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.322618 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.322641 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.351327 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.351429 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.351458 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.351484 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351578 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351663 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351687 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351703 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351718 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:20:01.351681575 +0000 UTC m=+83.214683688 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351739 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351792 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351842 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351770 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:20:01.351746106 +0000 UTC m=+83.214748199 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351867 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.351949 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:20:01.35188974 +0000 UTC m=+83.214891883 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.352005 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:20:01.351978462 +0000 UTC m=+83.214980665 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.427083 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.427173 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.427173 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.427275 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.427450 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.427551 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.427681 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:19:29 crc kubenswrapper[4912]: E1208 21:19:29.427824 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.431202 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.431242 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.431254 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.431273 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.431286 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.534843 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.534899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.534911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.534932 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.534946 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.637898 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.637950 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.637962 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.637982 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.638000 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.741576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.741662 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.741675 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.741696 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.741711 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.845513 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.845586 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.845658 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.845689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.845705 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.948678 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.948730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.948740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.948760 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:29 crc kubenswrapper[4912]: I1208 21:19:29.948772 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:29Z","lastTransitionTime":"2025-12-08T21:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.050697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.050743 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.050756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.050776 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.050794 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.154125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.154197 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.154215 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.154244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.154265 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.257543 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.257620 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.257643 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.257674 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.257700 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.341791 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.354618 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.355302 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.360145 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.360201 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.361081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.361128 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.361143 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.370235 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.383507 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.393369 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.404332 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.415213 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.427338 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.444744 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025
d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.458568 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.463548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.463623 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.463634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.463652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.463663 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.472886 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.485312 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.498731 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.510446 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.526604 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 
21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.544207 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.556247 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565719 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565804 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.565808 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:30Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.668402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.668496 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.668520 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.668611 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.668634 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.771665 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.771722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.771734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.771757 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.771770 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.875268 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.875355 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.875378 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.875411 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.875449 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.979288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.979383 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.979407 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.979447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:30 crc kubenswrapper[4912]: I1208 21:19:30.979470 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:30Z","lastTransitionTime":"2025-12-08T21:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.084357 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.084470 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.084512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.084601 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.084675 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.187207 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.187264 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.187276 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.187297 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.187311 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.290354 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.290417 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.290434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.290460 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.290475 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.393152 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.393229 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.393242 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.393267 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.393281 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.427757 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.427844 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.427888 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.427857 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:31 crc kubenswrapper[4912]: E1208 21:19:31.428142 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:31 crc kubenswrapper[4912]: E1208 21:19:31.428198 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:31 crc kubenswrapper[4912]: E1208 21:19:31.428293 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:31 crc kubenswrapper[4912]: E1208 21:19:31.428399 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.495929 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.495996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.496012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.496047 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.496061 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.599184 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.599236 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.599246 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.599263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.599274 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.701785 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.701828 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.701842 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.701863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.701875 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.805014 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.805129 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.805152 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.805185 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.805204 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.909490 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.910215 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.910402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.910448 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:31 crc kubenswrapper[4912]: I1208 21:19:31.910468 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:31Z","lastTransitionTime":"2025-12-08T21:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.015529 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.015625 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.015661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.015699 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.015723 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.119089 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.119162 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.119180 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.119208 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.119228 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.222708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.222761 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.222774 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.222836 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.222854 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.325555 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.325594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.325603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.325618 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.325628 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.428416 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.428468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.428483 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.428504 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.428520 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.531542 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.531588 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.531604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.531624 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.531637 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.634830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.634908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.634925 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.634948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.634963 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.636640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.636702 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.636712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.636730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.636741 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.660104 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:32Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.665763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.665817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.665833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.665859 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.665876 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.686002 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:32Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.691154 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.691216 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.691228 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.691250 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.691303 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.709512 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:32Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.714188 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.714264 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.714278 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.714300 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.714316 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.734372 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:32Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.738478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.738517 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.738527 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.738546 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.738556 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.752704 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:32Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:32 crc kubenswrapper[4912]: E1208 21:19:32.752829 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.754841 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.754891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.754904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.754930 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.754940 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.858372 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.858426 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.858446 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.858471 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.858491 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.961167 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.961279 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.961301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.961330 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:32 crc kubenswrapper[4912]: I1208 21:19:32.961348 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:32Z","lastTransitionTime":"2025-12-08T21:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.065398 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.065485 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.065518 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.065552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.065575 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.170263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.170356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.170382 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.170417 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.170442 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.274521 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.274593 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.274608 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.274635 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.274649 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.377655 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.377712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.377724 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.377746 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.377758 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.427584 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.427635 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.427703 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.427571 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:33 crc kubenswrapper[4912]: E1208 21:19:33.427809 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:33 crc kubenswrapper[4912]: E1208 21:19:33.427993 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:33 crc kubenswrapper[4912]: E1208 21:19:33.428184 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:33 crc kubenswrapper[4912]: E1208 21:19:33.428350 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.481223 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.481310 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.481333 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.481366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.481384 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.584456 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.584501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.584510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.584525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.584534 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.687851 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.688001 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.688023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.688103 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.688131 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.791653 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.791729 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.791767 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.791806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.791831 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.895742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.895831 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.895857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.895896 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.895923 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.999737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:33 crc kubenswrapper[4912]: I1208 21:19:33.999829 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:33.999853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:33.999894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:33.999915 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:33Z","lastTransitionTime":"2025-12-08T21:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.103623 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.103686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.103709 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.103737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.103758 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.206318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.206412 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.206432 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.206461 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.206483 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.309764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.309847 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.309861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.309883 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.309898 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.413125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.413190 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.413206 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.413231 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.413247 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.516524 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.516589 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.516605 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.516627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.516641 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.619842 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.619899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.619912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.619936 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.619951 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.723352 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.723440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.723467 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.723501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.723524 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.827507 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.827569 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.827588 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.827615 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.827634 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.930860 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.930898 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.930910 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.930929 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:34 crc kubenswrapper[4912]: I1208 21:19:34.930942 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:34Z","lastTransitionTime":"2025-12-08T21:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.035421 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.035507 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.035526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.035554 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.035572 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.139028 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.139136 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.139158 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.139188 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.139205 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.243015 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.243131 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.243158 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.243201 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.243228 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.346541 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.346615 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.346640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.346673 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.346697 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.427277 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.427352 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.427411 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.427293 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:35 crc kubenswrapper[4912]: E1208 21:19:35.427493 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:35 crc kubenswrapper[4912]: E1208 21:19:35.427704 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:35 crc kubenswrapper[4912]: E1208 21:19:35.427837 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:35 crc kubenswrapper[4912]: E1208 21:19:35.427985 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.450279 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.450332 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.450342 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.450366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.450376 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.553172 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.553249 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.553271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.553303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.553327 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.656282 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.656362 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.656385 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.656422 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.656444 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.758828 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.758884 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.758900 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.758924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.758939 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.862308 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.862351 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.862361 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.862384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.862395 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.965786 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.965846 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.965860 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.965883 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:35 crc kubenswrapper[4912]: I1208 21:19:35.965899 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:35Z","lastTransitionTime":"2025-12-08T21:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.068481 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.068520 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.068530 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.068546 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.068556 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.172080 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.172148 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.172160 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.172185 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.172200 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.275141 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.275209 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.275227 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.275257 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.275280 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.378479 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.378550 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.378563 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.378606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.378622 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.480977 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.481080 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.481096 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.481122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.481134 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.583978 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.584019 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.584028 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.584058 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.584068 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.686909 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.686975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.686998 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.687029 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.687085 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.790013 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.790110 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.790127 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.790149 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.790163 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.893273 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.893348 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.893366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.893387 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.893403 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.999495 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.999535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.999548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.999570 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:36 crc kubenswrapper[4912]: I1208 21:19:36.999580 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:36Z","lastTransitionTime":"2025-12-08T21:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.102727 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.103450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.103780 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.103995 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.104201 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.206753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.206833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.206847 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.206871 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.206885 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.309082 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.309124 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.309135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.309151 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.309164 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.411863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.411916 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.411928 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.411950 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.411963 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.427201 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.427243 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.427256 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.427264 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:19:37 crc kubenswrapper[4912]: E1208 21:19:37.427409 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:19:37 crc kubenswrapper[4912]: E1208 21:19:37.427522 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:19:37 crc kubenswrapper[4912]: E1208 21:19:37.427668 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:19:37 crc kubenswrapper[4912]: E1208 21:19:37.427792 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.516112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.516174 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.516188 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.516211 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.516225 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.619301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.619372 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.619391 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.619420 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.619441 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.722710 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.722753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.722764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.722782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.722791 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.826715 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.826789 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.826802 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.826824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.826842 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.930149 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.930210 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.930224 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.930248 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:37 crc kubenswrapper[4912]: I1208 21:19:37.930262 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:37Z","lastTransitionTime":"2025-12-08T21:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.033416 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.033471 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.033484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.033503 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.033512 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.136114 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.136177 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.136189 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.136211 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.136229 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.238686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.238743 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.238763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.238788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.238804 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.341681 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.341732 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.341744 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.341765 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.341779 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.446016 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.446088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.446888 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.446959 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.446984 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.459527 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.477522 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.494079 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.510668 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.523758 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.534969 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.547880 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.550149 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.550195 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.550213 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.550235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.550248 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.560011 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z"
Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.575301 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.589717 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.603202 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.617371 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.630959 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.644555 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.653824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.653882 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.653902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.653930 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.653948 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.659496 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.675139 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.686595 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.706821 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f
24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:38Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.756265 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.756307 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.756318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.756336 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.756346 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.858890 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.858983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.859002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.859063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.859091 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.963594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.963685 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.963708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.963750 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:38 crc kubenswrapper[4912]: I1208 21:19:38.963774 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:38Z","lastTransitionTime":"2025-12-08T21:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.067664 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.067727 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.067740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.067760 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.067774 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.177604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.177707 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.177742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.177799 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.177829 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.280846 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.280922 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.280942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.281004 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.281023 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.384018 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.384098 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.384112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.384134 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.384149 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.427827 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:39 crc kubenswrapper[4912]: E1208 21:19:39.428084 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.428426 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:39 crc kubenswrapper[4912]: E1208 21:19:39.428538 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.428767 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:39 crc kubenswrapper[4912]: E1208 21:19:39.428917 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.429020 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.429080 4912 scope.go:117] "RemoveContainer" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" Dec 08 21:19:39 crc kubenswrapper[4912]: E1208 21:19:39.429111 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:39 crc kubenswrapper[4912]: E1208 21:19:39.429273 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.487532 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.487585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.487595 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.487620 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.487632 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.590646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.590714 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.590729 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.590755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.590767 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.693225 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.693274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.693284 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.693302 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.693315 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.796453 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.796512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.796525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.796549 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.796563 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.899636 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.899696 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.899712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.899735 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:39 crc kubenswrapper[4912]: I1208 21:19:39.899751 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:39Z","lastTransitionTime":"2025-12-08T21:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.002733 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.002782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.002794 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.002818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.002830 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.105782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.105879 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.105905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.105951 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.105980 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.210409 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.210477 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.210488 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.210510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.210523 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.314022 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.314128 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.314150 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.314183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.314202 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.418693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.418805 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.418834 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.418868 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.418886 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.521724 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.521793 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.521804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.521824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.521836 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.624991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.625066 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.625081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.625105 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.625120 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.728102 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.728161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.728178 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.728200 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.728215 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.831085 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.831141 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.831154 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.831176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.831192 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.933971 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.934028 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.934084 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.934107 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:40 crc kubenswrapper[4912]: I1208 21:19:40.934122 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:40Z","lastTransitionTime":"2025-12-08T21:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.036319 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.036382 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.036393 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.036412 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.036424 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.147094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.147136 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.147146 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.147164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.147176 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.250272 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.250307 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.250318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.250336 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.250345 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.354795 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.354868 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.354889 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.354916 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.354933 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.427518 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:41 crc kubenswrapper[4912]: E1208 21:19:41.427693 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.427917 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:41 crc kubenswrapper[4912]: E1208 21:19:41.427966 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.428090 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:41 crc kubenswrapper[4912]: E1208 21:19:41.428147 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.428427 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:41 crc kubenswrapper[4912]: E1208 21:19:41.428473 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.457645 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.457689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.457794 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.457862 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.457877 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.560774 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.560819 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.560831 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.560852 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.560866 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.663767 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.663818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.663830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.663849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.663864 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.766707 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.766744 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.766755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.766775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.766786 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.869554 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.869606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.869620 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.869640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.869652 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.971849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.971910 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.971924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.971953 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:41 crc kubenswrapper[4912]: I1208 21:19:41.971969 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:41Z","lastTransitionTime":"2025-12-08T21:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.075631 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.075683 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.075692 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.075712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.075723 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.178380 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.178447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.178734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.178753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.178762 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.284104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.284168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.284181 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.284202 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.284215 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.387387 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.387440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.387455 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.387477 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.387491 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.490511 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.490574 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.490591 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.490616 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.490635 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.593579 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.593667 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.593691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.593724 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.593748 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.696091 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.696134 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.696144 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.696161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.696171 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.798792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.798845 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.798855 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.798875 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.798886 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.860907 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.860961 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.860973 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.860996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.861009 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.876783 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:42Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.882059 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.882123 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.882134 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.882155 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.882169 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.903280 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:42Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.907953 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.908005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.908018 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.908058 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.908071 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.921568 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:42Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.926292 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.926346 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.926362 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.926384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.926397 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.940166 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:42Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.944857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.944897 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.944907 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.944924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.944936 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.959055 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:42Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:42 crc kubenswrapper[4912]: E1208 21:19:42.959188 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.960721 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.960775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.960792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.960817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:42 crc kubenswrapper[4912]: I1208 21:19:42.960830 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:42Z","lastTransitionTime":"2025-12-08T21:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.062913 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.062957 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.062966 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.062981 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.062990 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.165724 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.165798 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.165814 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.165836 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.165849 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.268168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.268215 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.268234 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.268255 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.268267 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.318368 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.318612 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.318742 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:20:15.318714353 +0000 UTC m=+97.181716436 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.371911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.371959 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.371983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.372001 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.372010 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.427882 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.427943 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.428053 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.428093 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.428314 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.428529 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.428588 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:43 crc kubenswrapper[4912]: E1208 21:19:43.428654 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.474806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.475146 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.475236 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.475338 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.475404 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.578318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.578390 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.578399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.578420 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.578436 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.680993 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.681498 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.681652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.681837 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.681983 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.784733 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.785084 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.785193 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.785298 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.785384 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.888582 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.888634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.888656 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.888695 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.888709 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.991392 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.991448 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.991458 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.991476 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:43 crc kubenswrapper[4912]: I1208 21:19:43.991489 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:43Z","lastTransitionTime":"2025-12-08T21:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.093910 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.093975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.093987 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.094007 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.094020 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.196534 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.196585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.196597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.196619 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.196632 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.300090 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.300152 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.300164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.300182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.300194 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.402866 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.402934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.402950 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.402972 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.402987 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.505190 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.505248 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.505281 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.505304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.505319 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.607363 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.607402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.607413 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.607431 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.607482 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.710678 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.710747 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.710764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.710786 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.710800 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.813609 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.813685 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.813700 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.813722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.813734 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.916705 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.916785 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.916805 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.916832 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.916846 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:44Z","lastTransitionTime":"2025-12-08T21:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.962175 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/0.log" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.962240 4912 generic.go:334] "Generic (PLEG): container finished" podID="959add28-5508-49d7-8fe3-404acef398b0" containerID="ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0" exitCode=1 Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.962284 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerDied","Data":"ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0"} Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.962821 4912 scope.go:117] "RemoveContainer" containerID="ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.980587 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:44Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:44 crc kubenswrapper[4912]: I1208 21:19:44.997470 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:44Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.018476 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.025783 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.025828 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.025842 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.025864 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.025878 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.034911 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.050412 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.064729 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.078097 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.087824 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.096575 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.114061 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.129754 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.129787 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.129798 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.129830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.129841 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.130408 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.146086 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.158893 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.171266 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.186657 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.197172 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.212085 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.223701 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.232652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.232690 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.232701 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.232722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.232747 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.335590 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.335637 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.335651 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.335673 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.335685 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.427458 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.427507 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.427551 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.427598 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:45 crc kubenswrapper[4912]: E1208 21:19:45.427730 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:45 crc kubenswrapper[4912]: E1208 21:19:45.427836 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:45 crc kubenswrapper[4912]: E1208 21:19:45.427927 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:45 crc kubenswrapper[4912]: E1208 21:19:45.428075 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.438434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.438473 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.438484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.438503 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.438520 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.540617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.540671 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.540686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.540709 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.540722 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.643726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.643783 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.643797 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.643820 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.643834 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.747073 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.747144 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.747165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.747191 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.747210 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.849570 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.849612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.849624 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.849646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.849658 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.951647 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.951684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.951693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.951708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.951719 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:45Z","lastTransitionTime":"2025-12-08T21:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.969139 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/0.log" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.969206 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerStarted","Data":"67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83"} Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.981752 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:45 crc kubenswrapper[4912]: I1208 21:19:45.994121 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:45Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.006707 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.018245 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.037069 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.053125 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.054753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.054782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.054802 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.054818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.054830 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.067431 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.080509 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.093686 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.117302 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.128357 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.146314 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.157118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.157164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.157174 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.157195 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.157208 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.170111 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.188853 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.201838 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.213766 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.225884 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
8T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.236519 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:46Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.260181 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.260266 4912 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.260285 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.260314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.260333 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.363434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.363472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.363483 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.363501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.363510 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.466803 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.466861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.466877 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.466899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.466915 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.570751 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.570812 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.570838 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.574488 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.574552 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.677383 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.677449 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.677463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.677489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.677506 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.780435 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.780478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.780489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.780506 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.780517 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.883479 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.883540 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.883557 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.883583 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.883598 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.986416 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.986457 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.986469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.986486 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:46 crc kubenswrapper[4912]: I1208 21:19:46.986499 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:46Z","lastTransitionTime":"2025-12-08T21:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.089916 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.089963 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.089971 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.089988 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.089999 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.193081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.193137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.193151 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.193170 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.193185 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.296494 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.296537 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.296550 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.296576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.296590 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.399251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.399295 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.399304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.399320 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.399331 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.426780 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.426862 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.426789 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:47 crc kubenswrapper[4912]: E1208 21:19:47.426939 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.426863 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:47 crc kubenswrapper[4912]: E1208 21:19:47.427119 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:47 crc kubenswrapper[4912]: E1208 21:19:47.427196 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:47 crc kubenswrapper[4912]: E1208 21:19:47.427261 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.501948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.501983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.501991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.502005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.502016 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.604597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.604667 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.604688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.604713 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.604731 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.707016 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.707095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.707107 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.707157 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.707171 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.810604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.810646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.810655 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.810673 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.810684 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.913748 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.913795 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.913806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.913825 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:47 crc kubenswrapper[4912]: I1208 21:19:47.913836 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:47Z","lastTransitionTime":"2025-12-08T21:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.016911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.016970 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.016983 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.017002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.017016 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.120095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.120152 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.120165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.120183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.120193 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.223693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.223757 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.223772 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.223796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.223810 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.326257 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.326303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.326315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.326334 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.326346 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.429204 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.429261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.429272 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.429292 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.429304 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.441596 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.457275 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.472210 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.484896 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.501023 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.518132 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532153 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532197 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532206 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532222 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532237 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.532230 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.550926 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.567542 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.588071 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.607820 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.627673 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa
2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.634399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.634427 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:48 crc 
kubenswrapper[4912]: I1208 21:19:48.634436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.634452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.634463 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.643716 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.656294 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.670595 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.683113 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.696824 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.712366 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:48Z is after 2025-08-24T17:21:41Z" Dec 08 
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.737667 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.737715 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.737726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.737742 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.737755 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.839746 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.839798 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.839808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.839826 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.839836 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.942499 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.942548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.942556 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.942575 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
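Every "Node became not ready" entry in this run carries the same Ready=False condition: the kubelet only mirrors the container runtime's network readiness, and the runtime keeps reporting NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/ (on this cluster, written once the OVN-Kubernetes networking pods come up). A sketch of roughly what that readiness check reduces to; the real logic lives in the runtime's CNI config watcher (libcni), and the .conf/.conflist/.json extensions are the conventional ones libcni loads:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady reports whether any CNI network config exists in dir,
// approximating the runtime check behind "NetworkReady=false ...
// no CNI configuration file in /etc/kubernetes/cni/net.d/".
func networkReady(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni loads
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := networkReady("/etc/kubernetes/cni/net.d")
	fmt.Println("network ready:", ok, "err:", err)
}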
Dec 08 21:19:48 crc kubenswrapper[4912]: I1208 21:19:48.942590 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:48Z","lastTransitionTime":"2025-12-08T21:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.045326 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.045387 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.045397 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.045419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.045431 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.147730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.147804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.147826 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.147856 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.147875 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.251595 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.251664 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.251677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.251702 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.251718 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.354277 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.354330 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.354344 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.354360 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.354372 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.427313 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.427412 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.427435 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.427437 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:49 crc kubenswrapper[4912]: E1208 21:19:49.427551 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:49 crc kubenswrapper[4912]: E1208 21:19:49.427712 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:49 crc kubenswrapper[4912]: E1208 21:19:49.427982 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:49 crc kubenswrapper[4912]: E1208 21:19:49.428047 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.456854 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.456890 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.456903 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.456921 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.456933 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.559885 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.559951 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.559966 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.559990 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.560006 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.662361 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.662405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.662418 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.662440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.662454 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.764421 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.764468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.764481 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.764504 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.764517 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.866848 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.866902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.866912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.866930 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.866942 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.969627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.969680 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.969691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.969712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:49 crc kubenswrapper[4912]: I1208 21:19:49.969724 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:49Z","lastTransitionTime":"2025-12-08T21:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.072790 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.072845 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.072857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.072879 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.072892 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.176403 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.176466 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.176482 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.176509 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.176526 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.279216 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.279496 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.279512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.279535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.279570 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.383014 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.383077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.383088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.383106 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.383117 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.486235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.486284 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.486295 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.486314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.486326 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.588893 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.588945 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.588958 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.588976 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.588989 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.691936 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.691979 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.691991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.692009 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.692020 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.795609 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.795650 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.795660 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.795682 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.795693 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.899384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.899450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.899470 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.899503 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:50 crc kubenswrapper[4912]: I1208 21:19:50.899522 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:50Z","lastTransitionTime":"2025-12-08T21:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.002969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.003101 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.003116 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.003141 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.003157 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.106251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.106304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.106316 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.106337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.106349 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.208806 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.208849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.208863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.208881 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.208893 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.311610 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.311654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.311666 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.311684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.311695 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.414863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.414918 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.414931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.414952 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.414965 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.427264 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.427309 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.427310 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.427357 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:51 crc kubenswrapper[4912]: E1208 21:19:51.427457 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:51 crc kubenswrapper[4912]: E1208 21:19:51.427577 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:51 crc kubenswrapper[4912]: E1208 21:19:51.427756 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:51 crc kubenswrapper[4912]: E1208 21:19:51.427808 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.517856 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.517917 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.517933 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.517954 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.517968 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.622408 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.622504 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.622526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.622553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.622591 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.725245 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.725316 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.725325 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.725342 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.725353 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.827436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.827491 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.827500 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.827518 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.827529 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.930693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.930745 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.930763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.930789 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:51 crc kubenswrapper[4912]: I1208 21:19:51.930807 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:51Z","lastTransitionTime":"2025-12-08T21:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.033976 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.034026 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.034063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.034083 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.034095 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.137678 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.137740 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.137753 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.137775 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.137790 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.240985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.241055 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.241065 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.241083 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.241096 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.343558 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.343614 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.343624 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.343647 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.343657 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.446541 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.446597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.446616 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.446641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.446656 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.549585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.549638 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.549649 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.549666 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.549676 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.652451 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.652533 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.652553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.652575 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.652587 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.754853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.754923 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.754947 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.754980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.755002 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.858091 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.858149 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.858166 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.858191 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.858208 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.961189 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.961251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.961265 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.961290 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:52 crc kubenswrapper[4912]: I1208 21:19:52.961306 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:52Z","lastTransitionTime":"2025-12-08T21:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.023871 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.025384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.025476 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.025506 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.025530 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
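The entry that follows is the node-level counterpart of the pod status patches earlier in this section: a strategic merge patch in which $setElementOrder/conditions pins the order of the conditions list while each element is merged by its "type" key, alongside the node's allocatable/capacity totals and cached image list. A short sketch (illustrative only, not kubelet code) that builds the same patch shape for the Ready condition and prints it unescaped, which makes the backslash-escaped payload below easier to read:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Same strategic-merge-patch shape as the escaped payload below:
	// $setElementOrder/conditions fixes list order; entries merge by "type".
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"}, {"type": "DiskPressure"},
				{"type": "PIDPressure"}, {"type": "Ready"},
			},
			"conditions": []map[string]any{{
				"type":               "Ready",
				"status":             "False",
				"reason":             "KubeletNotReady",
				"lastHeartbeatTime":  "2025-12-08T21:19:53Z",
				"lastTransitionTime": "2025-12-08T21:19:53Z",
			}},
		},
	}
	out, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}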
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:53Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.056112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.056271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.056499 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.056697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.056923 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.085849 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:53Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.094418 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.094462 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
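[editor's note] Every "Error updating node status, will retry" entry in this burst fails for the same reason: the node.network-node-identity.openshift.io webhook serving on 127.0.0.1:9743 presents a TLS certificate that expired at 2025-08-24T17:21:41Z, while the node clock reads 2025-12-08. One way to confirm the serving certificate's validity window from the node (a sketch; assumes shell access to the node and the openssl CLI, neither of which appears in this log):

    openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates
    # prints notBefore=/notAfter=; given the error above, notAfter should read Aug 24 17:21:41 2025 GMT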
event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.094471 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.094489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.094498 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.128373 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:53Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.132911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.133069 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
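[editor's note] Independently of the webhook failure, the Ready=False condition itself is the standard kubelet symptom of a missing CNI configuration: the network provider (OVN-Kubernetes here, judging by the webhook name) has not yet written its config into /etc/kubernetes/cni/net.d/. A quick check from the node (sketch; assumes shell access):

    ls -l /etc/kubernetes/cni/net.d/
    # empty (or absent) until the network provider starts and writes its CNI config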
event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.133152 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.133250 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.133326 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.146311 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:53Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.151769 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.151813 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
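[editor's note] The kubelet retries the node-status patch a fixed number of times per sync loop (nodeStatusUpdateRetry, 5 in the upstream kubelet) before giving up, so the fifth consecutive failure below is followed by "Unable to update node status" err="update node status exceeds retry count". Once the API server is reachable again, the stuck Ready condition can be read back with (sketch; assumes a working kubeconfig for this CRC cluster):

    oc get node crc -o jsonpath='{.status.conditions[?(@.type=="Ready")].message}'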
event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.151822 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.151841 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.151852 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.163221 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:53Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.163339 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.164984 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.165017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.165052 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.165074 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.165088 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.267891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.268292 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.268384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.268471 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.268547 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.371265 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.371335 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.371352 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.371381 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.371399 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.427266 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.427346 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.427366 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.427266 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.427440 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.427566 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.427661 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:53 crc kubenswrapper[4912]: E1208 21:19:53.427708 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.429519 4912 scope.go:117] "RemoveContainer" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.474706 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.474748 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.474757 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.474778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.474788 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.577826 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.577891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.577914 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.577943 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.577961 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.681927 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.681992 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.682004 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.682020 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.682043 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.784800 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.784862 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.784874 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.784893 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.784903 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.888224 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.888289 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.888302 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.888328 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.888347 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.991817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.991861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.991873 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.991901 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:53 crc kubenswrapper[4912]: I1208 21:19:53.991912 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:53Z","lastTransitionTime":"2025-12-08T21:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.095586 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.095628 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.095641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.095661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.095676 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.198645 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.198691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.198703 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.198725 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.198737 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.301567 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.301632 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.301658 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.301714 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.301742 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.404176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.404209 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.404217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.404232 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.404241 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.506660 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.506744 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.506767 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.506798 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.506821 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.610113 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.610588 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.610692 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.610830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.610928 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.713704 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.713738 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.713747 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.713763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.713773 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.817135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.817196 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.817210 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.817276 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.817291 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.920468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.920902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.920911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.920932 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:54 crc kubenswrapper[4912]: I1208 21:19:54.920941 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:54Z","lastTransitionTime":"2025-12-08T21:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.000666 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/2.log" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.004088 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.004821 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.022314 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.028019 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.028078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.028090 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.028107 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.028122 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.037213 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.059147 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.075982 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.090512 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.104227 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.119103 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.131088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.131135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.131145 4912 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.131165 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.131177 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.141622 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.151518 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 
2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.164280 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.188296 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.209242 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.231327 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.233533 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.233580 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.233596 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.233617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.233630 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.249861 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.263934 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.279010 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.289836 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.302908 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:55Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.336061 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.336117 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.336128 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.336146 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.336156 4912 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.427404 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.427492 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:55 crc kubenswrapper[4912]: E1208 21:19:55.427562 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:55 crc kubenswrapper[4912]: E1208 21:19:55.427705 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.427791 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:55 crc kubenswrapper[4912]: E1208 21:19:55.427847 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.427898 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:55 crc kubenswrapper[4912]: E1208 21:19:55.427945 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.438255 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.438283 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.438291 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.438303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.438313 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.541215 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.541263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.541277 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.541302 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:55 crc kubenswrapper[4912]: I1208 21:19:55.541319 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:55Z","lastTransitionTime":"2025-12-08T21:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.013313 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/3.log" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.014203 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/2.log" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.018973 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" exitCode=1 Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.019069 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.019155 4912 scope.go:117] "RemoveContainer" containerID="c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.022688 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:19:57 crc kubenswrapper[4912]: E1208 21:19:57.023294 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.039356 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.052696 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.069253 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.082738 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.086653 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.086700 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.086714 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.086737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.086753 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.095113 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.113052 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.129722 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.144831 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.157574 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.174794 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.189023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.189085 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.189096 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.189118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.189134 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.195928 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:56Z\\\",\\\"message\\\":\\\"e:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1208 21:19:55.582250 6935 obj_retry.go:551] Creating *factory.egressNode crc took: 1.719333ms\\\\nI1208 21:19:55.582278 6935 factory.go:1336] Added *v1.Node event handler 7\\\\nI1208 21:19:55.582313 6935 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582330 6935 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 21:19:55.582340 6935 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 21:19:55.582369 6935 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 21:19:55.582398 6935 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 21:19:55.582374 6935 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 21:19:55.582442 6935 factory.go:656] Stopping watch factory\\\\nI1208 21:19:55.582463 6935 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582584 6935 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1208 21:19:55.582665 6935 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1208 21:19:55.582699 6935 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:55.582723 6935 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:55.582794 6935 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.212612 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.229609 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.253091 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\
"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08
T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.266523 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.281786 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.291824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.291869 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.291880 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.291899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.291909 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.294775 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.305101 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:57Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.394305 4912 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.394357 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.394369 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.394388 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.394399 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.427550 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.427586 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.427559 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:57 crc kubenswrapper[4912]: E1208 21:19:57.427722 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:57 crc kubenswrapper[4912]: E1208 21:19:57.427845 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.427860 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:57 crc kubenswrapper[4912]: E1208 21:19:57.427929 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:57 crc kubenswrapper[4912]: E1208 21:19:57.428326 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.496538 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.496585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.496594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.496612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.496622 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.603853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.603929 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.603949 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.603978 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.603997 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.707017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.707086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.707098 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.707118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.707130 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.810734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.810794 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.810814 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.810839 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.810857 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.914566 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.914662 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.914697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.914734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:57 crc kubenswrapper[4912]: I1208 21:19:57.914758 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:57Z","lastTransitionTime":"2025-12-08T21:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.018758 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.018817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.018830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.018853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.018867 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.024018 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/3.log"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.121309 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.121408 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.121440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.121475 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.121505 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.224684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.224736 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.224751 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.224778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.224793 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.328460 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.328513 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.328526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.328545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.328557 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.432238 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.432277 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.432288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.432304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.432314 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.441712 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.457599 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.472214 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.484634 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.498569 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.513812 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.529058 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.534307 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.534365 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.534379 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.534398 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.534408 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.543453 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.557598 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.570292 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.595626 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:56Z\\\",\\\"message\\\":\\\"e:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1208 21:19:55.582250 6935 obj_retry.go:551] Creating *factory.egressNode crc took: 1.719333ms\\\\nI1208 21:19:55.582278 6935 factory.go:1336] Added *v1.Node event handler 7\\\\nI1208 21:19:55.582313 6935 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582330 6935 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 21:19:55.582340 6935 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 21:19:55.582369 6935 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 21:19:55.582398 6935 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 21:19:55.582374 6935 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 21:19:55.582442 6935 factory.go:656] Stopping watch 
factory\\\\nI1208 21:19:55.582463 6935 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582584 6935 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1208 21:19:55.582665 6935 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1208 21:19:55.582699 6935 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:55.582723 6935 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:55.582794 6935 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.626639 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016
a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.637757 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.637819 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.637835 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.637863 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.637881 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.647700 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.682135 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.697827 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.712254 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.726862 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752481 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752497 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:19:58Z is after 2025-08-24T17:21:41Z" Dec 08 
21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752579 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.752615 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.855002 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.855065 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.855077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.855096 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.855109 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.958262 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.958332 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.958345 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.958367 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:58 crc kubenswrapper[4912]: I1208 21:19:58.958396 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:58Z","lastTransitionTime":"2025-12-08T21:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.062363 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.062415 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.062428 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.062447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.062462 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.166808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.166859 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.166868 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.166887 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.166899 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.271099 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.271156 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.271168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.271185 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.271200 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.374440 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.374482 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.374494 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.374510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.374521 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.427574 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:19:59 crc kubenswrapper[4912]: E1208 21:19:59.427797 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.428153 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:19:59 crc kubenswrapper[4912]: E1208 21:19:59.428259 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.428477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:19:59 crc kubenswrapper[4912]: E1208 21:19:59.428587 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.429740 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:19:59 crc kubenswrapper[4912]: E1208 21:19:59.429853 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.476571 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.476610 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.476621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.476640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.476653 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.580397 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.580463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.580482 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.580514 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.580535 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.683412 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.683452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.683464 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.683484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.683496 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.786491 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.786539 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.786547 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.786565 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.786575 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.888631 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.888677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.888688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.888707 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.888718 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.992123 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.992213 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.992233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.992253 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:19:59 crc kubenswrapper[4912]: I1208 21:19:59.992265 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:19:59Z","lastTransitionTime":"2025-12-08T21:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.094596 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.094650 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.094661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.094677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.094687 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.197078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.197157 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.197173 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.197201 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.197218 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.301006 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.301115 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.301138 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.301361 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.301388 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.404882 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.404946 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.404959 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.404980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.404992 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.511025 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.511082 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.511099 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.511118 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.511132 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.614351 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.614393 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.614405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.614422 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.614435 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.718129 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.718204 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.718229 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.718266 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.718296 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.821697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.821761 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.821783 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.821816 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.821845 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.934817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.934914 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.934931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.934960 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:00 crc kubenswrapper[4912]: I1208 21:20:00.934979 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:00Z","lastTransitionTime":"2025-12-08T21:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.038533 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.038687 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.038710 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.040356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.040441 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.144146 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.144226 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.144245 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.144274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.144293 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.248175 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.248327 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.248353 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.248387 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.248409 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.332711 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.332945 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.332895877 +0000 UTC m=+147.195897990 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.352385 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.352447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.352465 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.352491 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.352508 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.427806 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.427903 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.427816 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.427938 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.428062 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.428190 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
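
The UnmountVolume.TearDown failure above means that, at this moment, no CSI driver named kubevirt.io.hostpath-provisioner is registered with this kubelet, so the unmount is parked for a 1m4s retry. CSI drivers normally announce themselves by placing a registration socket under the kubelet's plugin registry directory; the sketch below lists whatever is registered. The /var/lib/kubelet/plugins_registry path is the conventional default and an assumption here, not something this log states:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Conventional kubelet plugin registration directory; an assumption,
        // since the log does not name it.
        dir := "/var/lib/kubelet/plugins_registry"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Printf("cannot read %s: %v\n", dir, err)
            return
        }
        if len(entries) == 0 {
            fmt.Println("no plugin sockets registered")
        }
        for _, e := range entries {
            // A CSI driver typically registers a socket named after itself,
            // e.g. kubevirt.io.hostpath-provisioner-reg.sock (illustrative name).
            fmt.Println("registered plugin socket:", filepath.Join(dir, e.Name()))
        }
    }
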
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.428389 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.428422 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.434477 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.434521 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.434558 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434567 4912 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434625 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.434605549 +0000 UTC m=+147.297607632 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.434646 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434727 4912 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434744 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434744 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434760 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434771 4912 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434774 4912 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434785 4912 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434784 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.434767483 +0000 UTC m=+147.297769576 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434840 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.434828044 +0000 UTC m=+147.297830147 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:20:01 crc kubenswrapper[4912]: E1208 21:20:01.434856 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.434848435 +0000 UTC m=+147.297850528 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.455363 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.455394 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.455405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.455423 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.455434 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.558104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.558139 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.558148 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.558164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.558172 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.661221 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.661266 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.661282 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.661301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.661313 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.764525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.764683 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.764704 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.764729 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.764746 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.868077 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.868158 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.868171 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.868194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.868207 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.971526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.971603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.971628 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.971666 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:01 crc kubenswrapper[4912]: I1208 21:20:01.971690 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:01Z","lastTransitionTime":"2025-12-08T21:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.074614 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.074682 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.074700 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.074727 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.074749 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.177590 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.177651 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.177663 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.177683 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.177695 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.280541 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.280627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.280640 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.280660 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.280672 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.384148 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.384223 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.384232 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.384254 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.384269 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.486605 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.486935 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.487022 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.487160 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.487251 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.589777 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.589829 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.589840 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.589870 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.589886 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.693817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.693886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.693908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.693941 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.693963 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.796847 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.796915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.796932 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.796957 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.796977 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.904819 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.904902 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.904915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.904939 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:02 crc kubenswrapper[4912]: I1208 21:20:02.904954 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:02Z","lastTransitionTime":"2025-12-08T21:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.008765 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.008831 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.008844 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.008868 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.008885 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.112810 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.112890 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.112913 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.112946 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.112967 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.217011 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.217125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.217137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.217160 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.217172 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.243220 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.243270 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.243282 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.243301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.243312 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
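Every heartbeat entry above reduces to the same condition: the kubelet keeps the node NotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. Below is a minimal Go sketch of that check, assuming only the directory named in the log message; the function and constant names are hypothetical and this is not the kubelet's actual CNI discovery code.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfDir is the directory named in the kubelet error above.
const cniConfDir = "/etc/kubernetes/cni/net.d"

// hasCNIConfig reports whether dir contains at least one file with an
// extension a CNI config loader would accept (.conf, .conflist, .json).
// Simplified illustration only, not kubelet logic.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig(cniConfDir)
	if err != nil || !ok {
		fmt.Printf("network not ready: no CNI configuration file in %s/ (err=%v)\n", cniConfDir, err)
		return
	}
	fmt.Println("CNI configuration present")
}

The status-patch failures that follow are a separate problem: the API server cannot deliver the patch to the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because, per the x509 error, the webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the log's current time of 2025-12-08. The sketch below reproduces that validity check against the address taken from the log; everything else (the handshake setup, the output wording) is an illustrative assumption, not OpenShift code.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// InsecureSkipVerify lets the handshake complete even with an expired
	// certificate, so the peer certificate can be inspected afterwards.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	leaf := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	switch {
	case now.After(leaf.NotAfter):
		// Mirrors the log's failure mode:
		// "x509: certificate has expired or is not yet valid".
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), leaf.NotAfter.Format(time.RFC3339))
	case now.Before(leaf.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), leaf.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("certificate valid until", leaf.NotAfter.Format(time.RFC3339))
	}
}

Under this reading, the NodeNotReady loop cannot clear until a CNI configuration appears, and the node-status retries below cannot succeed until the webhook's serving certificate is rotated.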
Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.265454 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:03Z is after 
2025-08-24T17:21:41Z"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.273973 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.274090 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.274115 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.274151 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.274175 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.298814 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:03Z is after 
2025-08-24T17:21:41Z"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.304496 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.304565 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.304586 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.304620 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.304645 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.326241 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:03Z is after 
2025-08-24T17:21:41Z"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.330600 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.330658 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.330677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.330699 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.330711 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.344137 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:03Z is after 
2025-08-24T17:21:41Z" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.347532 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.347576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.347592 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.347615 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.347629 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.361058 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:03Z is after 
2025-08-24T17:21:41Z" Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.361213 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.362852 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.362886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.362898 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.362914 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.362925 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.427895 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.427960 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.428720 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.428020 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.428834 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.428824 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.427966 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:03 crc kubenswrapper[4912]: E1208 21:20:03.428930 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.465726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.465782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.465792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.465811 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.465826 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.569129 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.569185 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.569196 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.569215 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.569230 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.671385 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.671427 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.671435 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.671451 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.671461 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.774476 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.774546 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.774578 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.774633 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.774661 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.877362 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.877447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.877465 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.877493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.877518 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.981905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.981991 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.982017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.982094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:03 crc kubenswrapper[4912]: I1208 21:20:03.982158 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:03Z","lastTransitionTime":"2025-12-08T21:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.086561 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.086646 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.086661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.086683 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.086696 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.189094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.189147 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.189159 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.189180 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.189196 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.291923 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.292003 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.292023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.292061 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.292071 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.395192 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.395261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.395288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.395323 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.395348 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.498801 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.498861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.498878 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.498904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.498921 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.602194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.602246 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.602256 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.602276 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.602291 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.704997 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.705063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.705072 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.705089 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.705101 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.807865 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.807914 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.807926 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.807946 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.807959 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.910873 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.910924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.910934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.910952 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:04 crc kubenswrapper[4912]: I1208 21:20:04.910962 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:04Z","lastTransitionTime":"2025-12-08T21:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.015857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.015962 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.015982 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.016015 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.016068 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.119817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.119861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.119872 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.119901 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.119918 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.222168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.222214 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.222224 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.222242 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.222255 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.324242 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.324284 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.324296 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.324315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.324335 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.427213 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.427415 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:05 crc kubenswrapper[4912]: E1208 21:20:05.427609 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:05 crc kubenswrapper[4912]: E1208 21:20:05.427751 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428161 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.427410 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428542 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428554 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428569 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.428582 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: E1208 21:20:05.428766 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:05 crc kubenswrapper[4912]: E1208 21:20:05.428855 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.443628 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.530960 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.531064 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.531088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.531119 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.531140 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.634168 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.634232 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.634251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.634279 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.634299 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.737553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.737608 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.737621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.737642 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.737657 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.840829 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.840876 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.840894 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.840915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.840932 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.944548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.944593 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.944604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.944623 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:05 crc kubenswrapper[4912]: I1208 21:20:05.944635 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:05Z","lastTransitionTime":"2025-12-08T21:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.047292 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.047340 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.047352 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.047512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.047528 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.150803 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.150870 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.150886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.150906 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.150920 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.253880 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.253934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.253948 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.253969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.253984 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.357271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.357326 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.357340 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.357364 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.357379 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.459931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.459976 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.459986 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.460005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.460016 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.563263 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.563380 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.563392 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.563406 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.563416 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.668220 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.668295 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.668319 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.668356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.668380 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.773548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.773621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.773645 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.773677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.773699 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.877390 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.877442 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.877463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.877494 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.877515 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.981244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.981288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.981300 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.981323 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:06 crc kubenswrapper[4912]: I1208 21:20:06.981345 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:06Z","lastTransitionTime":"2025-12-08T21:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.084208 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.084252 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.084261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.084279 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.084293 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.187250 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.187304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.187314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.187337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.187349 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.289975 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.290065 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.290079 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.290104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.290122 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.393081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.393140 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.393154 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.393176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.393192 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.426869 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.426936 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.426971 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.426892 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:07 crc kubenswrapper[4912]: E1208 21:20:07.427110 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:07 crc kubenswrapper[4912]: E1208 21:20:07.427199 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:07 crc kubenswrapper[4912]: E1208 21:20:07.427252 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:07 crc kubenswrapper[4912]: E1208 21:20:07.427285 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.496430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.496506 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.496525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.496555 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.496584 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.600365 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.600460 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.600487 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.600522 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.600548 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.703342 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.703433 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.703446 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.703467 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.703479 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.807796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.807980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.808011 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.808166 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.808195 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.911410 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.911475 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.911489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.911512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:07 crc kubenswrapper[4912]: I1208 21:20:07.911524 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:07Z","lastTransitionTime":"2025-12-08T21:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.015062 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.015126 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.015140 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.015162 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.015179 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.118541 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.118628 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.118652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.118687 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.118711 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.222905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.222977 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.222987 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.223005 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.223016 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.326081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.326138 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.326172 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.326194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.326218 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.429468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.429552 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.429593 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.429617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.429630 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.443741 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.459904 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 
21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.485468 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.503483 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.523614 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.531450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.531484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.531493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.531510 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.531522 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.541927 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.559523 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.573573 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.607979 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.621314 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.633744 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.635356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.635419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.635437 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.635654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.635678 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.646577 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.662832 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.679633 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.695101 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.707787 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d8ec3de-0845-4eec-a11d-13bc8c180f03\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19ba881c167a017a8b1aa79696550cac4b2d6e2d8bdbc6e029da49bbc9393b1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.722443 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.738480 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.738527 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.738536 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.738555 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.738567 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.740894 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.768075 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c02e0c1427baeb507efd2767de4d628e1d61011f24763dcd38990f3c9beb7427\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:25Z\\\",\\\"message\\\":\\\"finition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1208 21:19:25.279291 6541 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279346 6541 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 21:19:25.279415 6541 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279468 6541 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.279604 6541 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 21:19:25.286304 6541 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1208 21:19:25.286428 6541 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1208 21:19:25.286508 6541 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:25.286550 6541 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:25.286666 6541 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:56Z\\\",\\\"message\\\":\\\"e:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1208 21:19:55.582250 6935 obj_retry.go:551] Creating *factory.egressNode crc took: 1.719333ms\\\\nI1208 21:19:55.582278 6935 factory.go:1336] Added *v1.Node event handler 7\\\\nI1208 21:19:55.582313 6935 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582330 6935 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 21:19:55.582340 6935 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 21:19:55.582369 6935 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 21:19:55.582398 6935 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 21:19:55.582374 6935 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 21:19:55.582442 6935 factory.go:656] Stopping watch 
factory\\\\nI1208 21:19:55.582463 6935 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582584 6935 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1208 21:19:55.582665 6935 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1208 21:19:55.582699 6935 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:55.582723 6935 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:55.582794 6935 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:08Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.841109 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.841163 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.841178 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.841198 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.841212 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.944870 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.944931 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.944942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.944959 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:08 crc kubenswrapper[4912]: I1208 21:20:08.944970 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:08Z","lastTransitionTime":"2025-12-08T21:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.047598 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.047667 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.047682 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.047704 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.047718 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.150060 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.150112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.150122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.150140 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.150152 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.253529 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.253619 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.253629 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.253648 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.253658 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.356083 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.356127 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.356136 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.356153 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.356164 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.426977 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.426997 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.427012 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.427200 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:09 crc kubenswrapper[4912]: E1208 21:20:09.427281 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:09 crc kubenswrapper[4912]: E1208 21:20:09.427454 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:09 crc kubenswrapper[4912]: E1208 21:20:09.427509 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:09 crc kubenswrapper[4912]: E1208 21:20:09.427603 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.460530 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.460609 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.460634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.460665 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.460690 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.563912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.563992 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.564322 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.564691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.564762 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.669237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.669319 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.669391 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.669430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.669458 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.774095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.774159 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.774172 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.774194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.774235 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.877697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.877735 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.877744 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.877761 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.877772 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.981301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.981358 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.981370 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.981391 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:09 crc kubenswrapper[4912]: I1208 21:20:09.981405 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:09Z","lastTransitionTime":"2025-12-08T21:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.084371 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.084446 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.084468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.084497 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.084517 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.188234 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.188316 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.188334 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.188356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.188370 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.291857 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.291888 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.291899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.291915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.291923 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.394736 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.394801 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.394818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.394850 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.394869 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.500259 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.500936 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.500952 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.500973 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.500986 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.603514 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.603580 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.603595 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.603617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.603631 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.707019 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.707119 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.707137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.707164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.707181 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.811707 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.811778 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.811795 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.811820 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.811838 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.915521 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.915602 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.915623 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.915651 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:10 crc kubenswrapper[4912]: I1208 21:20:10.915672 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:10Z","lastTransitionTime":"2025-12-08T21:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.018236 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.018281 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.018293 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.018316 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.018332 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.121968 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.122063 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.122091 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.122125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.122147 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.225701 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.225763 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.225780 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.225809 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.225829 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.328735 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.328818 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.328836 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.328886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.328912 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.427532 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.427630 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.427630 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:11 crc kubenswrapper[4912]: E1208 21:20:11.427739 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.427841 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:11 crc kubenswrapper[4912]: E1208 21:20:11.427960 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:11 crc kubenswrapper[4912]: E1208 21:20:11.427856 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:11 crc kubenswrapper[4912]: E1208 21:20:11.428106 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.431853 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.431892 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.431905 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.431924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.431936 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.534804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.534864 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.534878 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.534899 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.534911 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.638376 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.638437 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.638450 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.638470 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.638481 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.741550 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.741591 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.741602 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.741621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.741633 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.845113 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.845194 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.845217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.845244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.845263 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.948402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.948762 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.948781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.948804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:11 crc kubenswrapper[4912]: I1208 21:20:11.948818 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:11Z","lastTransitionTime":"2025-12-08T21:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.061723 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.061789 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.061808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.061836 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.061854 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.165690 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.165755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.165937 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.165967 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.165985 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.269930 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.270010 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.270068 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.270107 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.270132 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.373291 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.373356 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.373380 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.373413 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.373436 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.429492 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:20:12 crc kubenswrapper[4912]: E1208 21:20:12.430027 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.445405 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.460772 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.476971 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.477008 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.477017 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.477053 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.477065 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.477857 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.496264 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.515291 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.528012 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.546022 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.560401 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d8ec3de-0845-4eec-a11d-13bc8c180f03\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19ba881c167a017a8b1aa79696550cac4b2d6e2d8bdbc6e029da49bbc9393b1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.577893 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.579984 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.580027 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.580072 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.580088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.580099 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.601296 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.634189 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:56Z\\\",\\\"message\\\":\\\"e:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1208 21:19:55.582250 6935 obj_retry.go:551] Creating *factory.egressNode crc took: 1.719333ms\\\\nI1208 21:19:55.582278 6935 factory.go:1336] Added *v1.Node event handler 7\\\\nI1208 21:19:55.582313 6935 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582330 6935 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 21:19:55.582340 6935 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 21:19:55.582369 6935 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 21:19:55.582398 6935 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 21:19:55.582374 6935 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 21:19:55.582442 6935 factory.go:656] Stopping watch factory\\\\nI1208 21:19:55.582463 6935 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582584 6935 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1208 21:19:55.582665 6935 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1208 21:19:55.582699 6935 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:55.582723 6935 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:55.582794 6935 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.659167 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.675979 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.683074 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.683365 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.683472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.683597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.683902 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.691239 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.704388 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.718714 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.729579 4912 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.741366 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 
21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.765589 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:12Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.786418 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.786459 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.786472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.786489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.786502 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.889698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.889750 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.889776 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.889850 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.889869 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.993915 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.993969 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.993986 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.994016 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:12 crc kubenswrapper[4912]: I1208 21:20:12.994060 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:12Z","lastTransitionTime":"2025-12-08T21:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.097161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.097224 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.097235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.097258 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.097270 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.200298 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.200396 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.200422 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.200456 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.200485 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.303075 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.303125 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.303137 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.303157 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.303170 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.405911 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.406525 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.406658 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.406751 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.406820 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.427448 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.427486 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.427487 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.427623 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.427759 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.427889 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.428071 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.428170 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.459436 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.459781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.459865 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.459934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.459992 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.473517 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.484261 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.484314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.484327 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.484347 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.484360 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.500290 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.505585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.505808 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.505985 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.506176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.506287 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.520391 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.523456 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.523494 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.523504 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.523522 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.523536 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.537411 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.541540 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.541592 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.541604 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.541628 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.541644 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.556605 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:13Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:13 crc kubenswrapper[4912]: E1208 21:20:13.556807 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.559075 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.559126 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.559141 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.559164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.559180 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.662709 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.662792 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.662812 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.662841 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.662861 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.766447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.766758 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.766896 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.767021 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.767180 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.870647 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.870684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.870693 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.870708 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.870717 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.973522 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.973569 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.973581 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.973603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:13 crc kubenswrapper[4912]: I1208 21:20:13.973617 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:13Z","lastTransitionTime":"2025-12-08T21:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.076274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.076320 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.076333 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.076357 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.076369 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.179677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.179737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.179749 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.179769 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.179779 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.282409 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.282463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.282479 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.282502 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.282515 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.385594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.385638 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.385651 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.385672 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.385685 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.488526 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.488611 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.488637 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.488673 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.488700 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.592148 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.592272 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.592290 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.592314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.592331 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.695223 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.695735 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.696025 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.696282 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.696506 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.800589 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.800656 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.800679 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.800710 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.800730 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.904515 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.904781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.904904 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.905058 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:14 crc kubenswrapper[4912]: I1208 21:20:14.905184 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:14Z","lastTransitionTime":"2025-12-08T21:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.008665 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.008994 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.009111 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.009210 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.009296 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.112553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.112603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.112617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.112639 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.112652 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.215598 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.215644 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.215659 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.215677 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.215693 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.318745 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.318805 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.318817 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.318839 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.318851 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.401337 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.401568 4912 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.401654 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs podName:6f259abd-9b12-458f-975d-68996ae1265c nodeName:}" failed. No retries permitted until 2025-12-08 21:21:19.401634518 +0000 UTC m=+161.264636601 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs") pod "network-metrics-daemon-lhjln" (UID: "6f259abd-9b12-458f-975d-68996ae1265c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.420946 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.420996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.421008 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.421028 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.421060 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.427267 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.427405 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.427294 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.427493 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.427293 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.427551 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.427267 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:15 crc kubenswrapper[4912]: E1208 21:20:15.427599 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.524238 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.524305 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.524328 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.524355 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.524373 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.633749 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.634484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.634498 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.634518 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.634529 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.737439 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.737484 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.737493 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.737511 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.737521 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.840083 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.840399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.840500 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.840641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.840776 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.943830 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.943891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.943910 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.943934 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:15 crc kubenswrapper[4912]: I1208 21:20:15.943949 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:15Z","lastTransitionTime":"2025-12-08T21:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.047334 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.047411 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.047427 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.047454 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.047473 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.150276 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.150337 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.150350 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.150372 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.150387 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.253512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.253575 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.253588 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.253609 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.253622 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.357088 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.357161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.357175 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.357197 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.357214 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.460076 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.460145 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.460164 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.460195 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.460213 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.563979 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.564051 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.564065 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.564086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.564100 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.667497 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.667577 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.667599 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.667627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.667645 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.770703 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.770764 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.770780 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.770803 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.770816 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.874142 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.874192 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.874207 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.874229 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.874245 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.976643 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.976687 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.976695 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.976713 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:16 crc kubenswrapper[4912]: I1208 21:20:16.976723 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:16Z","lastTransitionTime":"2025-12-08T21:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.080243 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.080304 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.080318 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.080348 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.080363 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.183626 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.183726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.183762 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.183782 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.183798 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.286805 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.286874 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.286891 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.286918 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.286937 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.390522 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.390593 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.390617 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.390652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.390675 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.427664 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.427807 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:17 crc kubenswrapper[4912]: E1208 21:20:17.427842 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.427919 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.427916 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:17 crc kubenswrapper[4912]: E1208 21:20:17.428101 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:17 crc kubenswrapper[4912]: E1208 21:20:17.428491 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:17 crc kubenswrapper[4912]: E1208 21:20:17.428650 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.493370 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.493434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.493452 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.493478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.493495 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.596512 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.596594 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.596606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.596629 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.596645 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.700580 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.700668 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.700691 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.700726 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.700749 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.804338 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.804405 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.804424 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.804451 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.804475 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.907226 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.907290 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.907309 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.907335 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:17 crc kubenswrapper[4912]: I1208 21:20:17.907354 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:17Z","lastTransitionTime":"2025-12-08T21:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.011279 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.011365 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.011390 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.011424 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.011450 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.115454 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.115550 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.115575 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.115614 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.115639 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.218499 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.218545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.218561 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.218581 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.218594 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.321998 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.322081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.322094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.322116 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.322131 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.428581 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.428650 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.428692 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.428728 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.428755 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.459185 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e29526-0aba-4c37-9925-3e4e1ea6e564\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81340eef71b931be2227ddd1aa1d1a9103941f9da876d82b178f56444216a440\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e09fc772ed1b1b9c46e07bfa2743940ddad02e403810f3580ab1ff88dcc85f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def84936afd8674a1eebedb00a649d660abcac94f5378398e7a7e3b6849e3160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce4a28df5e6f53be8c186b652462973a9fa9016a108550a33aa930247a1e10fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b3d5b601eb7314b474e9d25050eb4ab5fac0d257b146ec86d7a1839cd0d2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d95736dc09bf06129d421081abf9df5a13c401f2418d43351564850b638c817\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbfa17abded596211fb9c9688615bc6e82f86c64d3fc9378a2bcd67a65e2bea\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3652ad2d5bde047c029cc17cf9b235492738a46984d0db2ef1137bc45ee17674\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.473494 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aec3372534b773df19ae8ad848b7694889a650e83d18831642e977bd58e7af2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.488702 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.505164 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3081dfda3dc3b48ce904b2bedf870b6add75c97c9eb2ac18612d713f0fad47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b19f1ccb95578bcecbda45ded1eb2015bdf5f462e0489cccc5f22f8966ae945\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:57Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.521116 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"831b06bd-095f-439f-a166-088c2d584933\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e519158f87e5ca1c8399543117d90693d58d2656b9784f46152d8321b2fa974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
8T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fxph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-74dp4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.534543 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.534601 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.534614 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.534635 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.534653 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.536212 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6vbwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84c31be3-897d-43a4-9d4a-6767eeaa79de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20c74ff98d8cd91334e226db085ebe0137d5a8e43225836eee0de7bdcb7e8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c279k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6vbwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.549388 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5986eadb-31e4-483f-ab2e-e427be240400\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee65677cf9260161c0a3550adee6284b3811e63ba5642f2ebe580e88b2b081ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00d58173154a63c53a808c5db978ee30333556a594f4964f8d902b328fb33773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfj2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hdvlg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 
21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.564528 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33b574ba829461cc6a6a33c7e3c8cb4d9692245599d753018705b3c0a02648e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.582476 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c864447a-ffce-41e4-8c64-1d56bb049b43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 21:18:50.895650 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 21:18:50.897162 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2531596317/tls.crt::/tmp/serving-cert-2531596317/tls.key\\\\\\\"\\\\nI1208 21:18:56.795315 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 21:18:56.803062 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 21:18:56.803088 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 21:18:56.803115 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 21:18:56.803120 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 21:18:56.811311 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1208 21:18:56.811336 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811341 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 21:18:56.811346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 21:18:56.811349 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 21:18:56.811351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 21:18:56.811354 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1208 21:18:56.811499 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1208 21:18:56.814852 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.599505 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0edeae5a-f749-4fcd-8467-9933e9d4f2cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d281679731c452ae40fa10c386d51c23b18fc26dfb3ce8783107c47e6a667bac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5651df48bf6073c8643ada32272a1913692902a285c45457d7964333c863ece\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85d03388b2cf2263dae4692f2641b08edc034f489e16d1ec0e132ab65b17358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de875d66a041efd4605e344d75ea838f9336fff7f518544b290e177b62cfc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.610428 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-q6mfz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b73fac72-73a2-42cf-8d43-6aa187f7ba9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c734bbea0c56602db8951e7e4bc5b6f41946034f99c9ac722a94b576b7ea122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvxbk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-q6mfz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.623379 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.637386 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.637408 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.637416 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.637431 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.637441 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.644138 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9vfng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"935c51ff-7414-4687-be92-cda52803d7b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a11e3a4de41b90e6229f99a8e8af0227aecb70605f7ef3c1c75db4c3691b7a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698eef09d01b188966ca68cc7bdf017b6298cec31886f13e062c7f19e0bb920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e71a84a21ac2408ae2b1c0f7e64f69307342f827d12945894d1a13a7cdaeb50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc8694191827e2faa4b2917fc23d34f5851a0e29ddd14ad6e0d61fc1972de31\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e2a22de29248a9e5ab8b139bdd75153228a1feea305b60f449b11df79baa949\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://044f1f24c74b58a1f22b90173a7e2cb269747350c2789730b024a3d8c00c13b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f469a41ab80ae9a94259e9cfd592e252a438390fd15007be23a1ce38a50f8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:19:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx9wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9vfng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.658545 4912 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-lhjln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f259abd-9b12-458f-975d-68996ae1265c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tnbm7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:19:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lhjln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.672916 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3110bbdb-7917-4c4e-a730-8ab6ac774502\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475ed474010d7ab6ec99e76e34ea9e99334002f30e3c2a9b6cf6fe03308bc731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c47c8461aa1169c255d2bc2050bbe2fb278fdbe1acae3e41c6ac70a471c401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f3af14f6f2c4d59c9f02974f69c0f82d16611e75a909f9d620e217abbc72ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.685629 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d8ec3de-0845-4eec-a11d-13bc8c180f03\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19ba881c167a017a8b1aa79696550cac4b2d6e2d8bdbc6e029da49bbc9393b1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a45879f8e62cef7d358e00179b268b0a13b8b8e9ec6fdf6acddc1e46fc7aa947\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.701278 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.718330 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-rp5rf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959add28-5508-49d7-8fe3-404acef398b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:19:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:44Z\\\",\\\"message\\\":\\\"2025-12-08T21:18:58+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6\\\\n2025-12-08T21:18:58+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_dfafd718-1af2-44ff-83f3-f0757b1a95d6 to /host/opt/cni/bin/\\\\n2025-12-08T21:18:58Z [verbose] multus-daemon started\\\\n2025-12-08T21:18:58Z [verbose] Readiness Indicator file check\\\\n2025-12-08T21:19:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vm968\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-rp5rf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.740796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.740855 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.740867 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.740886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.740898 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.747007 4912 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57520f45-3ab9-41ea-8a10-3fa74c02f04b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T21:18:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:18:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T21:19:56Z\\\",\\\"message\\\":\\\"e:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1208 21:19:55.582250 6935 obj_retry.go:551] Creating *factory.egressNode crc took: 1.719333ms\\\\nI1208 21:19:55.582278 6935 factory.go:1336] Added *v1.Node event handler 7\\\\nI1208 21:19:55.582313 6935 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582330 6935 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 21:19:55.582340 6935 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 21:19:55.582369 6935 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 21:19:55.582398 6935 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 21:19:55.582374 6935 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 21:19:55.582442 6935 factory.go:656] Stopping watch factory\\\\nI1208 21:19:55.582463 6935 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 21:19:55.582584 6935 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1208 21:19:55.582665 6935 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1208 21:19:55.582699 6935 ovnkube.go:599] Stopped ovnkube\\\\nI1208 21:19:55.582723 6935 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1208 21:19:55.582794 6935 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T21:19:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T21:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T21:18:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T21:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkffq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T21:18:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7qdqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:18Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.843733 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.843781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.843796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.843816 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.843828 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.947192 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.947251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.947266 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.947288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:18 crc kubenswrapper[4912]: I1208 21:20:18.947302 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:18Z","lastTransitionTime":"2025-12-08T21:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.050425 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.050489 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.050505 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.050536 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.050552 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.154136 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.154210 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.154235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.154269 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.154296 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.257114 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.257174 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.257183 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.257204 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.257217 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.360791 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.361271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.361373 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.361474 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.361562 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.427308 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.427427 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.427512 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:19 crc kubenswrapper[4912]: E1208 21:20:19.427526 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:19 crc kubenswrapper[4912]: E1208 21:20:19.427653 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.427351 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:19 crc kubenswrapper[4912]: E1208 21:20:19.427757 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:19 crc kubenswrapper[4912]: E1208 21:20:19.428178 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.465117 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.465151 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.465159 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.465176 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.465185 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.568081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.568166 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.568182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.568205 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.568221 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.670621 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.670658 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.670668 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.670684 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.670694 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.773478 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.773535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.773548 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.773569 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.773582 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.875688 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.875738 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.875751 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.875770 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.875781 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.978849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.978912 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.978986 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.979011 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:19 crc kubenswrapper[4912]: I1208 21:20:19.979025 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:19Z","lastTransitionTime":"2025-12-08T21:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.081877 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.081937 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.081953 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.081980 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.082001 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.185654 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.185717 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.185729 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.185750 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.185763 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.289401 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.289462 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.289475 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.289495 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.289505 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.392264 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.392320 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.392333 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.392354 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.392368 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.495091 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.495172 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.495199 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.495235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.495267 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.598266 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.598330 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.598349 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.598375 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.598393 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.701456 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.701535 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.701553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.701580 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.701599 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.804978 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.805081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.805104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.805134 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.805153 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.908414 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.909135 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.909163 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.909206 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:20 crc kubenswrapper[4912]: I1208 21:20:20.909245 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:20Z","lastTransitionTime":"2025-12-08T21:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.012697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.012918 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.012974 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.013030 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.013097 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.115606 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.115680 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.115717 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.115741 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.115755 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.219643 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.219772 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.219796 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.219829 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.219850 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.323271 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.323333 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.323349 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.323376 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.323395 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
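The setters.go:603 entries print the node's Ready condition verbatim as JSON on every status sweep. As a hedged illustration, the sketch below unmarshals one such payload into a locally defined struct; the fields mirror exactly what the log prints, not the upstream k8s.io/api NodeCondition type:

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    // nodeCondition mirrors only the fields visible in the setters.go:603
    // lines above; it is a local sketch, not the upstream API type.
    type nodeCondition struct {
    	Type               string    `json:"type"`
    	Status             string    `json:"status"`
    	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
    	LastTransitionTime time.Time `json:"lastTransitionTime"`
    	Reason             string    `json:"reason"`
    	Message            string    `json:"message"`
    }

    func main() {
    	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
    	var c nodeCondition
    	if err := json.Unmarshal([]byte(raw), &c); err != nil {
    		panic(err)
    	}
    	fmt.Printf("node Ready=%s since %s: %s\n", c.Status,
    		c.LastTransitionTime.Format(time.RFC3339), c.Reason)
    }

Run against the payload above, this prints the NotReady transition time and the KubeletNotReady reason, which is all the repeated sweeps are restating until the network plugin comes up.
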
Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426802 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426867 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426828 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426874 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426924 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426942 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426955 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: E1208 21:20:21.426977 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426831 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.426875 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:21 crc kubenswrapper[4912]: E1208 21:20:21.427026 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:21 crc kubenswrapper[4912]: E1208 21:20:21.427172 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:21 crc kubenswrapper[4912]: E1208 21:20:21.427218 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.530211 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.530265 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.530277 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.530298 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.530310 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.633471 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.633542 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.633553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.633573 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.633586 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.737182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.737251 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.737274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.737297 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.737312 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.841622 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.841698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.841723 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.841755 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.841781 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.946603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.946681 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.946697 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.946724 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:21 crc kubenswrapper[4912]: I1208 21:20:21.946746 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:21Z","lastTransitionTime":"2025-12-08T21:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.054086 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.054162 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.054180 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.054212 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.054231 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.158385 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.158429 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.158442 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.158466 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.158477 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.262172 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.262243 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.262262 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.262290 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.262310 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.366678 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.366752 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.366773 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.366799 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.366819 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.469582 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.469652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.469672 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.469703 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.469742 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.605781 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.605824 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.605833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.605867 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.605879 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.709129 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.709188 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.709200 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.709217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.709227 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.812072 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.812112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.812126 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.812147 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.812157 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.915012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.915081 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.915095 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.915113 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:22 crc kubenswrapper[4912]: I1208 21:20:22.915125 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:22Z","lastTransitionTime":"2025-12-08T21:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.017671 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.017722 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.017737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.017760 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.017774 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.119976 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.120023 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.120069 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.120094 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.120108 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.224625 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.224687 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.224705 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.224737 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.224756 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.327661 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.327733 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.327756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.327788 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.327812 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.427117 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.427159 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.427267 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.427384 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.427475 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.427761 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.427816 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.427896 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.430683 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.430734 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.430752 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.430811 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.430829 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.533078 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.533181 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.533195 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.533220 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.533236 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.636250 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.636301 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.636313 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.636333 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.636346 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.739644 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.739712 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.739730 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.739759 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.739781 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.752689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.752765 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.752789 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.752822 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.752840 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.774373 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:23Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.780842 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.780917 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
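The kubelet_node_status.go:585 failure above is not a networking problem at all: the status patch is rejected because the node.network-node-identity.openshift.io webhook presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the current time (2025-12-08T21:20:23Z). A hedged Go sketch of the same crypto/x509 validity-window check; the certificate path is a placeholder, since the log does not say where the webhook's cert lives:

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    	"time"
    )

    func main() {
    	// Placeholder path; the log does not reveal the cert's location.
    	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
    	if err != nil {
    		panic(err)
    	}
    	block, _ := pem.Decode(data)
    	if block == nil {
    		panic("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		panic(err)
    	}
    	now := time.Now()
    	fmt.Printf("NotBefore=%s NotAfter=%s\n",
    		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	// Same comparison that produces the logged "certificate has expired
    	// or is not yet valid" error during TLS verification.
    	if now.After(cert.NotAfter) {
    		fmt.Printf("expired: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	} else if now.Before(cert.NotBefore) {
    		fmt.Println("not yet valid")
    	}
    }

Until that certificate is rotated, every status patch will keep failing and retrying exactly as the subsequent kubelet_node_status.go:585 entry shows.
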
event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.780936 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.780965 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.780990 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.799752 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:23Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.804861 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.804913 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.804928 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.804953 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.804969 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.825997 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:23Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.830516 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.830578 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.830596 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.830659 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.830679 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.850453 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:23Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.857659 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.857705 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.857717 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.857739 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.857753 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.872328 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T21:20:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f3bfd3da-2c7d-428e-8d8d-fcbf1998f292\\\",\\\"systemUUID\\\":\\\"c284651b-3329-4e29-9d38-e509676bbc7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T21:20:23Z is after 2025-08-24T17:21:41Z" Dec 08 21:20:23 crc kubenswrapper[4912]: E1208 21:20:23.872523 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.874314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory"
Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.874365 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.874381 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.874402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 08 21:20:23 crc kubenswrapper[4912]: I1208 21:20:23.874414 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:23Z","lastTransitionTime":"2025-12-08T21:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 08 21:20:25 crc kubenswrapper[4912]: I1208 21:20:25.426754 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:20:25 crc kubenswrapper[4912]: I1208 21:20:25.426957 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:20:25 crc kubenswrapper[4912]: I1208 21:20:25.427263 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:20:25 crc kubenswrapper[4912]: I1208 21:20:25.427297 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:20:25 crc kubenswrapper[4912]: E1208 21:20:25.427433 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:20:25 crc kubenswrapper[4912]: E1208 21:20:25.427558 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:20:25 crc kubenswrapper[4912]: E1208 21:20:25.427628 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 08 21:20:25 crc kubenswrapper[4912]: E1208 21:20:25.427933 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
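[Editor's note] The NodeNotReady heartbeat above repeats at roughly 100 ms intervals until a CNI provider (here OVN-Kubernetes) writes its config into /etc/kubernetes/cni/net.d/. As an illustration only, here is a minimal Python sketch of the readiness check these records describe; the directory comes from the log message itself, while the conventional .conf/.conflist/.json extensions are an assumption, not something this log states.

# Sketch: approximate the "network plugin ready" probe behind these records.
# Assumption: CNI configs live in /etc/kubernetes/cni/net.d/ (per the log)
# with the conventional .conf/.conflist/.json extensions (assumed).
import glob
import os

CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"  # taken from the log message

def network_ready(conf_dir: str = CNI_CONF_DIR) -> tuple[bool, str]:
    """Return (ready, message) shaped like the NetworkReady condition."""
    patterns = ("*.conf", "*.conflist", "*.json")
    found = [p for pat in patterns
             for p in glob.glob(os.path.join(conf_dir, pat))]
    if not found:
        return False, (f"no CNI configuration file in {conf_dir}/. "
                       "Has your network provider started?")
    return True, f"using CNI configuration {sorted(found)[0]}"

if __name__ == "__main__":
    ready, msg = network_ready()
    print(f"NetworkReady={ready} message:{msg}")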
Dec 08 21:20:26 crc kubenswrapper[4912]: I1208 21:20:26.428890 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"
Dec 08 21:20:26 crc kubenswrapper[4912]: E1208 21:20:26.429139 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7qdqq_openshift-ovn-kubernetes(57520f45-3ab9-41ea-8a10-3fa74c02f04b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b"
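[Editor's note] The "back-off 40s" in the CrashLoopBackOff record is the kubelet's restart delay, which starts small and doubles per consecutive failure up to a cap; 40 s corresponds to a third consecutive failure. A worked sketch of that schedule follows. The 10 s base and 300 s cap are the upstream kubelet defaults stated here as an assumption, not values read from this log.

# Sketch: kubelet-style crash-loop backoff. Assumed defaults: 10 s initial
# delay, doubling per failed restart, capped at 300 s (5 m). The "back-off
# 40s" in the record above corresponds to the third consecutive failure.
def crashloop_delay(restart_count: int, base: int = 10, cap: int = 300) -> int:
    """Delay in seconds before the next restart attempt."""
    return min(base * (2 ** restart_count), cap)

if __name__ == "__main__":
    for n in range(6):
        print(f"restart #{n + 1}: wait {crashloop_delay(n)}s")
    # restart #1: wait 10s, #2: 20s, #3: 40s, ..., capped at 300s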
Dec 08 21:20:27 crc kubenswrapper[4912]: I1208 21:20:27.427455 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 08 21:20:27 crc kubenswrapper[4912]: I1208 21:20:27.427461 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 08 21:20:27 crc kubenswrapper[4912]: E1208 21:20:27.427648 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 08 21:20:27 crc kubenswrapper[4912]: I1208 21:20:27.427721 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 08 21:20:27 crc kubenswrapper[4912]: I1208 21:20:27.427477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln"
Dec 08 21:20:27 crc kubenswrapper[4912]: E1208 21:20:27.427989 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c"
Dec 08 21:20:27 crc kubenswrapper[4912]: E1208 21:20:27.427816 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 08 21:20:27 crc kubenswrapper[4912]: E1208 21:20:27.427795 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
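[Editor's note] Every heartbeat serializes the node's Ready condition as the condition={...} JSON payload seen above. A small self-contained sketch of how a log scraper might pull reason and message out of one of these records; the sample line is copied from this log, and the regex-based parsing is only an illustration, not kubelet code.

# Sketch: extract the Ready condition from a "Node became not ready" record.
import json
import re

SAMPLE = ('Dec 08 21:20:27 crc kubenswrapper[4912]: I1208 21:20:27.498232 4912 '
          'setters.go:603] "Node became not ready" node="crc" condition='
          '{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:27Z",'
          '"lastTransitionTime":"2025-12-08T21:20:27Z","reason":"KubeletNotReady",'
          '"message":"container runtime network not ready: NetworkReady=false '
          'reason:NetworkPluginNotReady message:Network plugin returns error: '
          'no CNI configuration file in /etc/kubernetes/cni/net.d/. '
          'Has your network provider started?"}')

def parse_condition(line: str) -> dict:
    """Pull the JSON object after 'condition=' out of a kubelet log record."""
    match = re.search(r"condition=(\{.*\})", line)
    if not match:
        raise ValueError("no condition payload in line")
    return json.loads(match.group(1))

if __name__ == "__main__":
    cond = parse_condition(SAMPLE)
    print(cond["type"], cond["status"], "-", cond["reason"])
    # Ready False - KubeletNotReady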
Has your network provider started?"} Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.472991 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=91.472969866 podStartE2EDuration="1m31.472969866s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.458208908 +0000 UTC m=+110.321210991" watchObservedRunningTime="2025-12-08 21:20:28.472969866 +0000 UTC m=+110.335971939" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.485586 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=58.485556979 podStartE2EDuration="58.485556979s" podCreationTimestamp="2025-12-08 21:19:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.473352266 +0000 UTC m=+110.336354349" watchObservedRunningTime="2025-12-08 21:20:28.485556979 +0000 UTC m=+110.348559062" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.485954 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-q6mfz" podStartSLOduration=92.485948009 podStartE2EDuration="1m32.485948009s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.485899048 +0000 UTC m=+110.348901131" watchObservedRunningTime="2025-12-08 21:20:28.485948009 +0000 UTC m=+110.348950092" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.530714 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.530754 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.530766 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.530787 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.530800 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:28Z","lastTransitionTime":"2025-12-08T21:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.531883 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-9vfng" podStartSLOduration=91.531858861 podStartE2EDuration="1m31.531858861s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.517259509 +0000 UTC m=+110.380261612" watchObservedRunningTime="2025-12-08 21:20:28.531858861 +0000 UTC m=+110.394860944" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.564714 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.564691598 podStartE2EDuration="23.564691598s" podCreationTimestamp="2025-12-08 21:20:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.563629752 +0000 UTC m=+110.426631835" watchObservedRunningTime="2025-12-08 21:20:28.564691598 +0000 UTC m=+110.427693681" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.566571 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=92.566537654 podStartE2EDuration="1m32.566537654s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.550579277 +0000 UTC m=+110.413581370" watchObservedRunningTime="2025-12-08 21:20:28.566537654 +0000 UTC m=+110.429539737" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.591123 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-rp5rf" podStartSLOduration=91.591101096 podStartE2EDuration="1m31.591101096s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.59046239 +0000 UTC m=+110.453464473" watchObservedRunningTime="2025-12-08 21:20:28.591101096 +0000 UTC m=+110.454103189" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.631103 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hdvlg" podStartSLOduration=91.631077331 podStartE2EDuration="1m31.631077331s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.631004499 +0000 UTC m=+110.494006582" watchObservedRunningTime="2025-12-08 21:20:28.631077331 +0000 UTC m=+110.494079414" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.633161 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.633207 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.633217 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.633235 4912 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.633244 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:28Z","lastTransitionTime":"2025-12-08T21:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.657927 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=87.657905689 podStartE2EDuration="1m27.657905689s" podCreationTimestamp="2025-12-08 21:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.657858778 +0000 UTC m=+110.520860881" watchObservedRunningTime="2025-12-08 21:20:28.657905689 +0000 UTC m=+110.520907772" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.735627 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.735667 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.735675 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.735692 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.735705 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:28Z","lastTransitionTime":"2025-12-08T21:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.737241 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6vbwt" podStartSLOduration=92.737228654 podStartE2EDuration="1m32.737228654s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.734328002 +0000 UTC m=+110.597330085" watchObservedRunningTime="2025-12-08 21:20:28.737228654 +0000 UTC m=+110.600230737" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.737518 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podStartSLOduration=91.737511681 podStartE2EDuration="1m31.737511681s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:28.708900108 +0000 UTC m=+110.571902191" watchObservedRunningTime="2025-12-08 21:20:28.737511681 +0000 UTC m=+110.600513764" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.838313 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.838375 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.838391 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.838416 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.838434 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:28Z","lastTransitionTime":"2025-12-08T21:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.941553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.941609 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.941622 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.941643 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:28 crc kubenswrapper[4912]: I1208 21:20:28.941656 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:28Z","lastTransitionTime":"2025-12-08T21:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.044274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.044340 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.044355 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.044400 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.044416 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.147229 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.147288 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.147303 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.147324 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.147337 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.249637 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.249686 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.249698 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.249718 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.249731 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.352349 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.352390 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.352399 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.352417 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.352426 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.427490 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.427535 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.427627 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.427525 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:29 crc kubenswrapper[4912]: E1208 21:20:29.427836 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:29 crc kubenswrapper[4912]: E1208 21:20:29.427986 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:29 crc kubenswrapper[4912]: E1208 21:20:29.428217 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:29 crc kubenswrapper[4912]: E1208 21:20:29.428345 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.455233 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.455283 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.455295 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.455315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.455325 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.558343 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.558411 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.558434 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.558468 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.558493 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.662216 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.662293 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.662307 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.662331 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.662346 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.764380 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.764419 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.764430 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.764447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.764459 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.867274 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.867354 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.867380 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.867508 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.867586 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.970415 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.970460 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.970469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.970488 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:29 crc kubenswrapper[4912]: I1208 21:20:29.970498 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:29Z","lastTransitionTime":"2025-12-08T21:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.073231 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.073291 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.073309 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.073336 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.073356 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.175473 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.175544 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.175562 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.175581 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.175592 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.278533 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.278598 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.278618 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.278644 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.278662 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.381947 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.381987 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.381996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.382012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.382021 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.485042 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.485100 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.485112 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.485132 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.485143 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.588461 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.588553 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.588578 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.588612 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.588632 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.691756 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.691840 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.691881 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.691917 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.691940 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.795794 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.795935 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.795955 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.795984 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.796006 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.899122 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.899200 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.899225 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.899258 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:30 crc kubenswrapper[4912]: I1208 21:20:30.899280 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:30Z","lastTransitionTime":"2025-12-08T21:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.002823 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.002903 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.002926 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.002962 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.002980 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.106531 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.106602 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.106618 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.106641 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.106656 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.149164 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/1.log" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.149941 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/0.log" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.150026 4912 generic.go:334] "Generic (PLEG): container finished" podID="959add28-5508-49d7-8fe3-404acef398b0" containerID="67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83" exitCode=1 Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.150132 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerDied","Data":"67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.150211 4912 scope.go:117] "RemoveContainer" containerID="ca434a4242b92828700b978d7051c7200fb028128b7d67068ca09c7bafdea1b0" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.150821 4912 scope.go:117] "RemoveContainer" containerID="67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83" Dec 08 21:20:31 crc kubenswrapper[4912]: E1208 21:20:31.152232 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-rp5rf_openshift-multus(959add28-5508-49d7-8fe3-404acef398b0)\"" pod="openshift-multus/multus-rp5rf" podUID="959add28-5508-49d7-8fe3-404acef398b0" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.209590 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.209620 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.209629 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.209645 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.209655 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.312603 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.312666 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.312682 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.312703 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.312717 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.415642 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.415706 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.415721 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.415744 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.415759 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.427544 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.427571 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.427577 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.427577 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:31 crc kubenswrapper[4912]: E1208 21:20:31.427683 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:31 crc kubenswrapper[4912]: E1208 21:20:31.427838 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:31 crc kubenswrapper[4912]: E1208 21:20:31.427917 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:31 crc kubenswrapper[4912]: E1208 21:20:31.428139 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.519255 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.519315 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.519327 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.519346 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.519360 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.621872 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.621908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.621917 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.621935 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.621947 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.724278 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.724332 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.724346 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.724366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.724378 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.827878 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.827989 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.828007 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.828043 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.828087 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.930964 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.931013 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.931025 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.931061 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:31 crc kubenswrapper[4912]: I1208 21:20:31.931074 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:31Z","lastTransitionTime":"2025-12-08T21:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.034366 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.034402 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.034412 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.034447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.034468 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.137545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.138406 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.138488 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.138573 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.138631 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.157539 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/1.log" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.241545 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.241922 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.241996 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.242104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.242173 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.346171 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.346220 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.346235 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.346259 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.346276 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.448760 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.448804 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.448814 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.448833 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.448845 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.551953 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.552080 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.552111 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.552144 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.552170 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.655384 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.655443 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.655469 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.655501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.655515 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.758237 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.758275 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.758284 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.758300 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.758316 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.861082 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.861145 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.861162 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.861182 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.861192 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.965377 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.965472 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.965490 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.965516 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:32 crc kubenswrapper[4912]: I1208 21:20:32.965534 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:32Z","lastTransitionTime":"2025-12-08T21:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.068222 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.068278 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.068293 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.068314 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.068328 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.172394 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.172448 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.172463 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.172483 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.172496 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.275748 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.275821 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.275849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.275896 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.275921 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.378576 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.378623 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.378634 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.378652 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.378664 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.427762 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.427856 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.427836 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:33 crc kubenswrapper[4912]: E1208 21:20:33.428183 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.428350 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:33 crc kubenswrapper[4912]: E1208 21:20:33.428630 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:33 crc kubenswrapper[4912]: E1208 21:20:33.429146 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:33 crc kubenswrapper[4912]: E1208 21:20:33.429466 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.482563 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.482619 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.482630 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.482651 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.482663 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.585515 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.585585 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.585597 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.585626 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.585638 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.688644 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.688689 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.688701 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.688720 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.688732 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.791811 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.791874 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.791886 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.791908 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.791943 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.894455 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.894490 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.894501 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.894519 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.894533 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.997177 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.997232 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.997244 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.997264 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:33 crc kubenswrapper[4912]: I1208 21:20:33.997277 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:33Z","lastTransitionTime":"2025-12-08T21:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.107779 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.107849 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.107862 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.107885 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.107899 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:34Z","lastTransitionTime":"2025-12-08T21:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.212340 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.212406 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.212422 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.212447 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.212462 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:34Z","lastTransitionTime":"2025-12-08T21:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.269012 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.269104 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.269117 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.269136 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.269148 4912 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T21:20:34Z","lastTransitionTime":"2025-12-08T21:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.337254 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc"] Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.338208 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.342078 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.342235 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.342244 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.342551 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.444755 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.444881 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2230fee9-116b-4617-9a99-dba10c0e08fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.444971 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.445091 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2230fee9-116b-4617-9a99-dba10c0e08fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.445127 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2230fee9-116b-4617-9a99-dba10c0e08fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546768 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2230fee9-116b-4617-9a99-dba10c0e08fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546809 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2230fee9-116b-4617-9a99-dba10c0e08fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546830 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546857 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2230fee9-116b-4617-9a99-dba10c0e08fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546896 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.546973 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-ssl-certs\") 
pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.547016 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2230fee9-116b-4617-9a99-dba10c0e08fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.549509 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2230fee9-116b-4617-9a99-dba10c0e08fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.554234 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2230fee9-116b-4617-9a99-dba10c0e08fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.565351 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2230fee9-116b-4617-9a99-dba10c0e08fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9phzc\" (UID: \"2230fee9-116b-4617-9a99-dba10c0e08fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: I1208 21:20:34.653777 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" Dec 08 21:20:34 crc kubenswrapper[4912]: W1208 21:20:34.669504 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2230fee9_116b_4617_9a99_dba10c0e08fc.slice/crio-663b38214daef4d958e32736f1e6b86194437bca8254f72ef1514d717b189358 WatchSource:0}: Error finding container 663b38214daef4d958e32736f1e6b86194437bca8254f72ef1514d717b189358: Status 404 returned error can't find the container with id 663b38214daef4d958e32736f1e6b86194437bca8254f72ef1514d717b189358 Dec 08 21:20:35 crc kubenswrapper[4912]: I1208 21:20:35.170997 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" event={"ID":"2230fee9-116b-4617-9a99-dba10c0e08fc","Type":"ContainerStarted","Data":"663b38214daef4d958e32736f1e6b86194437bca8254f72ef1514d717b189358"} Dec 08 21:20:35 crc kubenswrapper[4912]: I1208 21:20:35.427173 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:35 crc kubenswrapper[4912]: I1208 21:20:35.427213 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:35 crc kubenswrapper[4912]: I1208 21:20:35.427365 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:35 crc kubenswrapper[4912]: E1208 21:20:35.427571 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:35 crc kubenswrapper[4912]: I1208 21:20:35.427611 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:35 crc kubenswrapper[4912]: E1208 21:20:35.427823 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:35 crc kubenswrapper[4912]: E1208 21:20:35.427899 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:35 crc kubenswrapper[4912]: E1208 21:20:35.427979 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:36 crc kubenswrapper[4912]: I1208 21:20:36.176310 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" event={"ID":"2230fee9-116b-4617-9a99-dba10c0e08fc","Type":"ContainerStarted","Data":"2055474e7dbfd2d495de78d7bd617f417ff3ce287d0827cee31c37f7cef2c9fe"} Dec 08 21:20:36 crc kubenswrapper[4912]: I1208 21:20:36.191241 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9phzc" podStartSLOduration=99.1912139 podStartE2EDuration="1m39.1912139s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:36.190894482 +0000 UTC m=+118.053896565" watchObservedRunningTime="2025-12-08 21:20:36.1912139 +0000 UTC m=+118.054215983" Dec 08 21:20:37 crc kubenswrapper[4912]: I1208 21:20:37.427405 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:37 crc kubenswrapper[4912]: I1208 21:20:37.427440 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:37 crc kubenswrapper[4912]: I1208 21:20:37.427582 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:37 crc kubenswrapper[4912]: E1208 21:20:37.427586 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:37 crc kubenswrapper[4912]: I1208 21:20:37.427667 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:37 crc kubenswrapper[4912]: E1208 21:20:37.427848 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:37 crc kubenswrapper[4912]: E1208 21:20:37.427905 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:37 crc kubenswrapper[4912]: E1208 21:20:37.427986 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:38 crc kubenswrapper[4912]: E1208 21:20:38.463621 4912 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 08 21:20:38 crc kubenswrapper[4912]: E1208 21:20:38.529362 4912 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:20:39 crc kubenswrapper[4912]: I1208 21:20:39.427562 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:39 crc kubenswrapper[4912]: I1208 21:20:39.427577 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:39 crc kubenswrapper[4912]: E1208 21:20:39.428149 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:39 crc kubenswrapper[4912]: I1208 21:20:39.427678 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:39 crc kubenswrapper[4912]: I1208 21:20:39.427599 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:39 crc kubenswrapper[4912]: E1208 21:20:39.428290 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:39 crc kubenswrapper[4912]: E1208 21:20:39.428387 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:39 crc kubenswrapper[4912]: E1208 21:20:39.428474 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:40 crc kubenswrapper[4912]: I1208 21:20:40.428629 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:20:41 crc kubenswrapper[4912]: I1208 21:20:41.427052 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:41 crc kubenswrapper[4912]: I1208 21:20:41.427097 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:41 crc kubenswrapper[4912]: I1208 21:20:41.427191 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:41 crc kubenswrapper[4912]: E1208 21:20:41.427231 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:41 crc kubenswrapper[4912]: I1208 21:20:41.427237 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:41 crc kubenswrapper[4912]: E1208 21:20:41.427339 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:41 crc kubenswrapper[4912]: E1208 21:20:41.427531 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:41 crc kubenswrapper[4912]: E1208 21:20:41.427570 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:42 crc kubenswrapper[4912]: I1208 21:20:42.114132 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-lhjln"] Dec 08 21:20:42 crc kubenswrapper[4912]: I1208 21:20:42.200627 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/3.log" Dec 08 21:20:42 crc kubenswrapper[4912]: I1208 21:20:42.203836 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerStarted","Data":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} Dec 08 21:20:42 crc kubenswrapper[4912]: I1208 21:20:42.203886 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:42 crc kubenswrapper[4912]: E1208 21:20:42.204065 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:42 crc kubenswrapper[4912]: I1208 21:20:42.204431 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:20:43 crc kubenswrapper[4912]: I1208 21:20:43.427018 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:43 crc kubenswrapper[4912]: I1208 21:20:43.427068 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:43 crc kubenswrapper[4912]: I1208 21:20:43.427263 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:43 crc kubenswrapper[4912]: I1208 21:20:43.427422 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:43 crc kubenswrapper[4912]: E1208 21:20:43.427413 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:43 crc kubenswrapper[4912]: E1208 21:20:43.427551 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:43 crc kubenswrapper[4912]: E1208 21:20:43.427668 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:43 crc kubenswrapper[4912]: E1208 21:20:43.427843 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:43 crc kubenswrapper[4912]: E1208 21:20:43.530609 4912 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:20:45 crc kubenswrapper[4912]: I1208 21:20:45.427511 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:45 crc kubenswrapper[4912]: I1208 21:20:45.427606 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:45 crc kubenswrapper[4912]: I1208 21:20:45.427511 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:45 crc kubenswrapper[4912]: I1208 21:20:45.427669 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:45 crc kubenswrapper[4912]: E1208 21:20:45.427703 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:45 crc kubenswrapper[4912]: E1208 21:20:45.427838 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:45 crc kubenswrapper[4912]: E1208 21:20:45.427941 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:45 crc kubenswrapper[4912]: E1208 21:20:45.428241 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:46 crc kubenswrapper[4912]: I1208 21:20:46.427949 4912 scope.go:117] "RemoveContainer" containerID="67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83" Dec 08 21:20:46 crc kubenswrapper[4912]: I1208 21:20:46.456823 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podStartSLOduration=109.456786361 podStartE2EDuration="1m49.456786361s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:42.236608071 +0000 UTC m=+124.099610164" watchObservedRunningTime="2025-12-08 21:20:46.456786361 +0000 UTC m=+128.319788494" Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.226312 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/1.log" Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.226380 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerStarted","Data":"638726f38d0ee5e325aad32f9f85b601c871f91699d426f8fa589839fab05eb1"} Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.427260 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.427342 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:47 crc kubenswrapper[4912]: E1208 21:20:47.427423 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.427357 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:47 crc kubenswrapper[4912]: E1208 21:20:47.427555 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:47 crc kubenswrapper[4912]: I1208 21:20:47.427649 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:47 crc kubenswrapper[4912]: E1208 21:20:47.427675 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:47 crc kubenswrapper[4912]: E1208 21:20:47.427725 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:48 crc kubenswrapper[4912]: E1208 21:20:48.531481 4912 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:20:49 crc kubenswrapper[4912]: I1208 21:20:49.426782 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:49 crc kubenswrapper[4912]: I1208 21:20:49.426861 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:49 crc kubenswrapper[4912]: I1208 21:20:49.426821 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:49 crc kubenswrapper[4912]: I1208 21:20:49.426782 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:49 crc kubenswrapper[4912]: E1208 21:20:49.427011 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:49 crc kubenswrapper[4912]: E1208 21:20:49.427200 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:49 crc kubenswrapper[4912]: E1208 21:20:49.427432 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:49 crc kubenswrapper[4912]: E1208 21:20:49.427531 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:51 crc kubenswrapper[4912]: I1208 21:20:51.427375 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:51 crc kubenswrapper[4912]: I1208 21:20:51.427449 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:51 crc kubenswrapper[4912]: I1208 21:20:51.427374 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:51 crc kubenswrapper[4912]: E1208 21:20:51.427680 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:51 crc kubenswrapper[4912]: I1208 21:20:51.427812 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:51 crc kubenswrapper[4912]: E1208 21:20:51.427970 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:51 crc kubenswrapper[4912]: E1208 21:20:51.428156 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:51 crc kubenswrapper[4912]: E1208 21:20:51.428297 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:53 crc kubenswrapper[4912]: I1208 21:20:53.427603 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:53 crc kubenswrapper[4912]: I1208 21:20:53.427603 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:53 crc kubenswrapper[4912]: I1208 21:20:53.428288 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:53 crc kubenswrapper[4912]: E1208 21:20:53.428527 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 21:20:53 crc kubenswrapper[4912]: I1208 21:20:53.428559 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:53 crc kubenswrapper[4912]: E1208 21:20:53.428697 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lhjln" podUID="6f259abd-9b12-458f-975d-68996ae1265c" Dec 08 21:20:53 crc kubenswrapper[4912]: E1208 21:20:53.428843 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 21:20:53 crc kubenswrapper[4912]: E1208 21:20:53.429154 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 21:20:54 crc kubenswrapper[4912]: I1208 21:20:54.927895 4912 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 08 21:20:54 crc kubenswrapper[4912]: I1208 21:20:54.985726 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-s2plb"] Dec 08 21:20:54 crc kubenswrapper[4912]: I1208 21:20:54.987158 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:54 crc kubenswrapper[4912]: I1208 21:20:54.991858 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:54.999636 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:54.999770 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:54.999837 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:54.999929 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.000026 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.000007 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.000263 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.000559 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.002373 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.002567 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d848b"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.003337 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.003804 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.004264 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kxhsq"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.004353 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.004845 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.006346 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.008927 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.009797 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.010439 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-8rkpr"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.011121 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.022438 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.023551 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.025000 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.033620 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.035218 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.036004 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.043872 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.067190 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.067716 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.069334 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.069838 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.069992 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.070604 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.070780 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.070956 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.070968 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.071197 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.071502 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.071808 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.071976 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072155 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072336 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072484 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072647 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072804 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.072985 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.073269 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.075302 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.075539 4912 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.075744 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.075876 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.076254 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.076434 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.076580 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.076031 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.077052 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.077183 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.077289 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.077782 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078187 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078294 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078493 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078598 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078696 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078776 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.078209 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.079311 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.079403 4912 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.079432 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.079516 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.080849 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.081350 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.081995 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.082225 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.082488 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.082690 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.082809 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.082920 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.083082 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.083181 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.087159 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.087945 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.088207 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.088496 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.090111 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2g5dn"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.091760 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.099946 4912 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.100228 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.102263 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.102642 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.102976 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.103301 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105179 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.103352 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.103896 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-config\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105774 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105808 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105910 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105947 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj6nm\" (UniqueName: 
\"kubernetes.io/projected/294c26d9-93fd-4ca1-8744-0bb56db49b94-kube-api-access-hj6nm\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.105976 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-encryption-config\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106001 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106027 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106076 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106109 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106173 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wsjf\" (UniqueName: \"kubernetes.io/projected/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-kube-api-access-8wsjf\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106202 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w259p\" (UniqueName: \"kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106234 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/294c26d9-93fd-4ca1-8744-0bb56db49b94-serving-cert\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106260 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106286 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106314 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-service-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106338 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106353 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106369 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106400 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106424 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106464 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-client\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106493 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-images\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106517 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-policies\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106547 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106578 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106606 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-machine-approver-tls\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106628 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8nkl\" (UniqueName: \"kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106669 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvghm\" (UniqueName: \"kubernetes.io/projected/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-kube-api-access-vvghm\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106693 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-serving-cert\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106720 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106742 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106764 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-auth-proxy-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106787 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106813 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.106969 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhxzl\" (UniqueName: \"kubernetes.io/projected/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-kube-api-access-lhxzl\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107028 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-config\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107077 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107173 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.103394 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107266 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-serving-cert\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107303 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-dir\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107378 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djx7c\" (UniqueName: \"kubernetes.io/projected/b20ccfdc-c499-40e4-9a0b-bebc13394494-kube-api-access-djx7c\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107453 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107568 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wgkw\" (UniqueName: \"kubernetes.io/projected/b17a753b-7754-47a9-8432-0f1fab0fb3be-kube-api-access-9wgkw\") pod \"downloads-7954f5f757-8rkpr\" (UID: \"b17a753b-7754-47a9-8432-0f1fab0fb3be\") " pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.107611 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.108451 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.124799 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.131727 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.132582 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.133658 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.136434 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.136931 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.137166 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.138405 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.138750 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.139938 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.140272 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.141248 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.166700 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.166741 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167212 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167323 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167392 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167724 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167759 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.167881 4912 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.168088 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.168242 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.168258 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.168694 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.169769 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.170115 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.170564 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.172796 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.172986 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.173503 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.174273 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.174836 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.177056 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.177479 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.178752 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.180065 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.180511 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.183340 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.183470 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.183505 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.183897 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.183992 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.184113 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.184241 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.184323 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.184774 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.189287 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.189503 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.190095 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.191528 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.192448 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.192942 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.193241 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.193539 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.194090 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.197106 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.197764 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-b77mr"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.198246 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.198476 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.198720 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.199141 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.199684 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.200331 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.200585 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7c67"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.201746 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.201821 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.202459 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.205454 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.206586 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.206981 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-96qt5"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.207327 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208166 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tmgx2"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208649 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208680 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208713 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-auth-proxy-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208750 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvghm\" (UniqueName: \"kubernetes.io/projected/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-kube-api-access-vvghm\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208775 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-serving-cert\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208801 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208835 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhxzl\" (UniqueName: \"kubernetes.io/projected/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-kube-api-access-lhxzl\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208856 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-config\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208881 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208907 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208934 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-serving-cert\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208959 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-dir\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.208985 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: 
\"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209006 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djx7c\" (UniqueName: \"kubernetes.io/projected/b20ccfdc-c499-40e4-9a0b-bebc13394494-kube-api-access-djx7c\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209026 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wgkw\" (UniqueName: \"kubernetes.io/projected/b17a753b-7754-47a9-8432-0f1fab0fb3be-kube-api-access-9wgkw\") pod \"downloads-7954f5f757-8rkpr\" (UID: \"b17a753b-7754-47a9-8432-0f1fab0fb3be\") " pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209064 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209281 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209318 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-config\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209324 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-dir\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209344 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209373 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209401 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hj6nm\" (UniqueName: \"kubernetes.io/projected/294c26d9-93fd-4ca1-8744-0bb56db49b94-kube-api-access-hj6nm\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209410 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209432 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-encryption-config\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209457 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209482 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209511 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209537 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.209569 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wsjf\" (UniqueName: \"kubernetes.io/projected/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-kube-api-access-8wsjf\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.211182 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.211732 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/294c26d9-93fd-4ca1-8744-0bb56db49b94-serving-cert\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.211873 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w259p\" (UniqueName: \"kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.211915 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.211958 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212002 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-service-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212324 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212363 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212402 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212446 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212525 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-client\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212566 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-images\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212592 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-policies\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212612 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212630 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212816 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-config\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212904 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212939 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-machine-approver-tls\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.212973 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8nkl\" (UniqueName: \"kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.213055 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.213537 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-auth-proxy-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.216391 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.217849 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-config\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.221686 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.223940 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-serving-cert\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.224667 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.224836 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.225075 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.225334 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.225350 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-config\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.225510 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.225574 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.226357 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/294c26d9-93fd-4ca1-8744-0bb56db49b94-service-ca-bundle\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.226373 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-serving-cert\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.226370 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.227161 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/294c26d9-93fd-4ca1-8744-0bb56db49b94-serving-cert\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.227197 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-images\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.227377 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-audit-policies\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.228029 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ccfdc-c499-40e4-9a0b-bebc13394494-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.228452 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.229920 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.230937 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.230988 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.231448 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.231822 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-machine-approver-tls\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.231847 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.232074 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.232846 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.233259 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.233521 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.233567 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.234070 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.234636 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.234806 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-encryption-config\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.235855 4912 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gdkk9"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.237433 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b20ccfdc-c499-40e4-9a0b-bebc13394494-etcd-client\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.238995 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.239943 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.240292 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d848b"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.241544 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.242815 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.244089 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.244712 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-s2plb"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.246502 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.248017 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.248130 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kxhsq"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.250154 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.250204 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.251252 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.252540 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.253553 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.254570 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.255821 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.257563 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.258808 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-bfkpt"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.259805 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.259922 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.261022 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.262184 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.263323 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2g5dn"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.264386 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8rkpr"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.265601 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.266710 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.267755 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.268970 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.270168 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.270773 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.271573 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.272857 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.274102 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7c67"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.275733 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.277026 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.278139 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.280733 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-mt2rf"] Dec 08 21:20:55 
crc kubenswrapper[4912]: I1208 21:20:55.281610 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.282216 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7lvnt"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.283407 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.284350 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.285624 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.287155 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.290600 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tmgx2"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.292941 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-96qt5"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.294866 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bfkpt"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.296591 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gdkk9"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.297966 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.299493 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.300631 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.301734 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7lvnt"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.302819 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mt2rf"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.303927 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.304927 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6qbtj"] Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.307101 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.310578 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315477 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbrc5\" (UniqueName: \"kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315529 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-serving-cert\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315572 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315597 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-image-import-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315620 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw5f2\" (UniqueName: \"kubernetes.io/projected/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-kube-api-access-vw5f2\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315665 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47vnk\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315809 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-client\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315908 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.315966 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316065 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316120 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316157 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316185 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316213 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-config\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316240 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit\") pod \"apiserver-76f77b778f-s2plb\" (UID: 
\"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316269 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316290 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316312 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316366 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316390 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5zm5\" (UniqueName: \"kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316410 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit-dir\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316434 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-serving-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316465 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-node-pullsecrets\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316496 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316518 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316550 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-encryption-config\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316600 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.316658 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:55.816632223 +0000 UTC m=+137.679634326 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316702 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.316748 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.330145 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.350000 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.370418 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.390464 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.410076 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.417634 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.417819 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:55.917792518 +0000 UTC m=+137.780794601 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.417890 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-config\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.417944 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.417971 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418018 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418060 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418083 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418115 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418135 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418164 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5zm5\" (UniqueName: \"kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418191 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit-dir\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418214 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-serving-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418258 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-node-pullsecrets\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418321 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5krpq\" (UniqueName: \"kubernetes.io/projected/49c88b45-1149-4b8a-b390-d817da5ae936-kube-api-access-5krpq\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418358 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418383 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418408 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-config\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418449 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-encryption-config\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418482 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418500 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418549 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418572 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418594 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418636 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418671 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbrc5\" (UniqueName: \"kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418688 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-serving-cert\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " 
pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418715 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-image-import-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418751 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw5f2\" (UniqueName: \"kubernetes.io/projected/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-kube-api-access-vw5f2\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418771 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418790 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47vnk\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418821 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-client\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418865 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418886 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418904 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p797\" 
(UniqueName: \"kubernetes.io/projected/9e28a674-c730-4538-bb06-d7df2c82cc6e-kube-api-access-6p797\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418944 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418970 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.418988 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.419020 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.419056 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-node-pullsecrets\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.419247 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit-dir\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.419832 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-config\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.420182 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-serving-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc 
kubenswrapper[4912]: I1208 21:20:55.420200 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.420448 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-audit\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.420512 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.421521 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.421627 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.421704 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.421821 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.421958 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:55.921943674 +0000 UTC m=+137.784945757 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.421979 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.422187 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-image-import-ca\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.422429 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.422450 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.423099 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.423439 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.423947 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.424187 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-encryption-config\") pod \"apiserver-76f77b778f-s2plb\" 
(UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.425323 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.426658 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-etcd-client\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.426921 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.426943 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.426984 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.427132 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.427276 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.428124 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-serving-cert\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.437023 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.437859 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.443824 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.450604 4912 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.469606 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.490138 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522527 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522658 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-config\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522696 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522716 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522742 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522827 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p797\" (UniqueName: \"kubernetes.io/projected/9e28a674-c730-4538-bb06-d7df2c82cc6e-kube-api-access-6p797\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522867 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522893 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522919 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.522943 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5krpq\" (UniqueName: \"kubernetes.io/projected/49c88b45-1149-4b8a-b390-d817da5ae936-kube-api-access-5krpq\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.523169 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.02315173 +0000 UTC m=+137.886153813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.525935 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.530022 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.549196 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.571108 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.589774 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.597220 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.609561 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.613944 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-config\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.623806 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.624270 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.124249534 +0000 UTC m=+137.987251617 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.629976 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.650624 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.670429 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.689855 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.710766 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.725143 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.725347 4912 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.225316128 +0000 UTC m=+138.088318211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.725573 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.725942 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.22592585 +0000 UTC m=+138.088927933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.730634 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.750448 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.770545 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.789841 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.810405 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.827110 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.827332 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.32728994 +0000 UTC m=+138.190292073 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.827547 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.828236 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.328210129 +0000 UTC m=+138.191212252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.832079 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.851087 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.870106 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.890307 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.910607 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.929136 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.929403 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:20:56.429363654 +0000 UTC m=+138.292365767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.930064 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.930582 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 21:20:55 crc kubenswrapper[4912]: E1208 21:20:55.930644 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.43062077 +0000 UTC m=+138.293622883 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.951416 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.985793 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 21:20:55 crc kubenswrapper[4912]: I1208 21:20:55.996151 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.012212 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.030837 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.031008 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:20:56.530980919 +0000 UTC m=+138.393983002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.031203 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.031625 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.531614522 +0000 UTC m=+138.394616605 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.034408 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.051096 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.070680 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.090881 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.110466 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.129845 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.132308 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.132428 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.63240402 +0000 UTC m=+138.495406113 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.132721 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.133080 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.633068914 +0000 UTC m=+138.496070997 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.149702 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.170088 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.190928 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.208309 4912 request.go:700] Waited for 1.005493268s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.209786 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.230049 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.234130 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.234377 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.734346682 +0000 UTC m=+138.597348765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.234577 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.235204 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.735176089 +0000 UTC m=+138.598178172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.250965 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.269854 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.290564 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.311600 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.330658 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.335766 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: 
E1208 21:20:56.335956 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.835912465 +0000 UTC m=+138.698914588 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.336473 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.337069 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.837004648 +0000 UTC m=+138.700006761 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.351696 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.370419 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.389916 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.411226 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.430690 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.437504 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.437677 4912 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.937652883 +0000 UTC m=+138.800654976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.437798 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.438213 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:56.938203424 +0000 UTC m=+138.801205527 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.451139 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.470587 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.506936 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w259p\" (UniqueName: \"kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p\") pod \"controller-manager-879f6c89f-sm5lg\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524471 4912 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524489 4912 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524550 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls podName:49c88b45-1149-4b8a-b390-d817da5ae936 nodeName:}" failed. 
No retries permitted until 2025-12-08 21:20:57.024529454 +0000 UTC m=+138.887531537 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls") pod "dns-default-bfkpt" (UID: "49c88b45-1149-4b8a-b390-d817da5ae936") : failed to sync secret cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524458 4912 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524568 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume podName:49c88b45-1149-4b8a-b390-d817da5ae936 nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.024560034 +0000 UTC m=+138.887562117 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume") pod "dns-default-bfkpt" (UID: "49c88b45-1149-4b8a-b390-d817da5ae936") : failed to sync configmap cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.524613 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert podName:9e28a674-c730-4538-bb06-d7df2c82cc6e nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.024589585 +0000 UTC m=+138.887591698 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert") pod "olm-operator-6b444d44fb-x4zf4" (UID: "9e28a674-c730-4538-bb06-d7df2c82cc6e") : failed to sync secret cache: timed out waiting for the condition Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.537563 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8nkl\" (UniqueName: \"kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl\") pod \"oauth-openshift-558db77b4-wt7vh\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.538713 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.539248 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.039228417 +0000 UTC m=+138.902230520 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.549220 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvghm\" (UniqueName: \"kubernetes.io/projected/c4b8e2bd-4e87-4797-b33c-7bb36c40d59d-kube-api-access-vvghm\") pod \"machine-approver-56656f9798-52g7k\" (UID: \"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.566473 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhxzl\" (UniqueName: \"kubernetes.io/projected/5c9f4edc-e900-4571-8d81-4f253bfe8ea1-kube-api-access-lhxzl\") pod \"machine-api-operator-5694c8668f-d848b\" (UID: \"5c9f4edc-e900-4571-8d81-4f253bfe8ea1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.586007 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djx7c\" (UniqueName: \"kubernetes.io/projected/b20ccfdc-c499-40e4-9a0b-bebc13394494-kube-api-access-djx7c\") pod \"apiserver-7bbb656c7d-765c9\" (UID: \"b20ccfdc-c499-40e4-9a0b-bebc13394494\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.591961 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.609267 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wgkw\" (UniqueName: \"kubernetes.io/projected/b17a753b-7754-47a9-8432-0f1fab0fb3be-kube-api-access-9wgkw\") pod \"downloads-7954f5f757-8rkpr\" (UID: \"b17a753b-7754-47a9-8432-0f1fab0fb3be\") " pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.626059 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj6nm\" (UniqueName: \"kubernetes.io/projected/294c26d9-93fd-4ca1-8744-0bb56db49b94-kube-api-access-hj6nm\") pod \"authentication-operator-69f744f599-kxhsq\" (UID: \"294c26d9-93fd-4ca1-8744-0bb56db49b94\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.640436 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.640870 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-08 21:20:57.140847242 +0000 UTC m=+139.003849345 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.646793 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wsjf\" (UniqueName: \"kubernetes.io/projected/58a44c46-96a6-4c80-b2b8-2bf7f66c3259-kube-api-access-8wsjf\") pod \"openshift-config-operator-7777fb866f-pmz5c\" (UID: \"58a44c46-96a6-4c80-b2b8-2bf7f66c3259\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.650647 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.668612 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.669314 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.672661 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.683151 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.691071 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.691249 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.703294 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.710845 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.719639 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.732004 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.741162 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.741634 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.241609319 +0000 UTC m=+139.104611412 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.751014 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.770473 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.780699 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.794102 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.811426 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.812584 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d848b"] Dec 08 21:20:56 crc kubenswrapper[4912]: W1208 21:20:56.815676 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4b8e2bd_4e87_4797_b33c_7bb36c40d59d.slice/crio-806aa0371f53668a8b651bfb3a73727e414e1f78c0a59503e41039ab342d4be2 WatchSource:0}: Error finding container 806aa0371f53668a8b651bfb3a73727e414e1f78c0a59503e41039ab342d4be2: Status 404 returned error can't find the container with id 806aa0371f53668a8b651bfb3a73727e414e1f78c0a59503e41039ab342d4be2 Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.835794 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.843077 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.843505 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.343488999 +0000 UTC m=+139.206491072 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.851015 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: W1208 21:20:56.868933 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c9f4edc_e900_4571_8d81_4f253bfe8ea1.slice/crio-57215c90d72ef93c6298911782a097b5f72f85e47f57ae68ccd38475b242d3d4 WatchSource:0}: Error finding container 57215c90d72ef93c6298911782a097b5f72f85e47f57ae68ccd38475b242d3d4: Status 404 returned error can't find the container with id 57215c90d72ef93c6298911782a097b5f72f85e47f57ae68ccd38475b242d3d4 Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.871114 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.892537 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.911186 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.931489 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.944674 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:56 crc kubenswrapper[4912]: E1208 21:20:56.945610 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.445529963 +0000 UTC m=+139.308532066 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.950523 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.958881 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9"] Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.970858 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 21:20:56 crc kubenswrapper[4912]: W1208 21:20:56.975682 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb20ccfdc_c499_40e4_9a0b_bebc13394494.slice/crio-25cf23319171d9bd56e35d89efdb86b149bca97a685cf909ddc79332dd1484d4 WatchSource:0}: Error finding container 25cf23319171d9bd56e35d89efdb86b149bca97a685cf909ddc79332dd1484d4: Status 404 returned error can't find the container with id 25cf23319171d9bd56e35d89efdb86b149bca97a685cf909ddc79332dd1484d4 Dec 08 21:20:56 crc kubenswrapper[4912]: I1208 21:20:56.991649 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.010702 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.030747 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.047016 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.047108 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.047188 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.047333 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: 
\"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.047717 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.547698309 +0000 UTC m=+139.410700392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.048145 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49c88b45-1149-4b8a-b390-d817da5ae936-config-volume\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.051540 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.057203 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49c88b45-1149-4b8a-b390-d817da5ae936-metrics-tls\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.058447 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9e28a674-c730-4538-bb06-d7df2c82cc6e-srv-cert\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.069892 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.072329 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: W1208 21:20:57.078605 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58a44c46_96a6_4c80_b2b8_2bf7f66c3259.slice/crio-91a036b78ee6bf98b2bb15f5ce798aefa6fba2c7bf8a17b9e23b67ab57a3e9ef WatchSource:0}: Error finding container 91a036b78ee6bf98b2bb15f5ce798aefa6fba2c7bf8a17b9e23b67ab57a3e9ef: Status 404 returned error can't find the container with id 91a036b78ee6bf98b2bb15f5ce798aefa6fba2c7bf8a17b9e23b67ab57a3e9ef Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.090319 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.110277 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 
21:20:57.130102 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.148679 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.148952 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.648917736 +0000 UTC m=+139.511919819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.150214 4912 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.150391 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.150807 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.650790434 +0000 UTC m=+139.513792517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.170690 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.192660 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.208430 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kxhsq"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.209366 4912 request.go:700] Waited for 1.901940448s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.210607 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.211622 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8rkpr"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.212428 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.218297 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.253964 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.254268 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.754221216 +0000 UTC m=+139.617223309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.254413 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.254871 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.754857669 +0000 UTC m=+139.617859772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.265890 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" event={"ID":"5c9f4edc-e900-4571-8d81-4f253bfe8ea1","Type":"ContainerStarted","Data":"57215c90d72ef93c6298911782a097b5f72f85e47f57ae68ccd38475b242d3d4"} Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.267364 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" event={"ID":"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d","Type":"ContainerStarted","Data":"806aa0371f53668a8b651bfb3a73727e414e1f78c0a59503e41039ab342d4be2"} Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.268382 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" event={"ID":"58a44c46-96a6-4c80-b2b8-2bf7f66c3259","Type":"ContainerStarted","Data":"91a036b78ee6bf98b2bb15f5ce798aefa6fba2c7bf8a17b9e23b67ab57a3e9ef"} Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.268948 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5zm5\" (UniqueName: \"kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5\") pod \"console-f9d7485db-46knq\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.269349 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" event={"ID":"b20ccfdc-c499-40e4-9a0b-bebc13394494","Type":"ContainerStarted","Data":"25cf23319171d9bd56e35d89efdb86b149bca97a685cf909ddc79332dd1484d4"} Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.285973 4912 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw5f2\" (UniqueName: \"kubernetes.io/projected/1b7b4ef5-c90c-459b-8285-d5163a0e9b78-kube-api-access-vw5f2\") pod \"apiserver-76f77b778f-s2plb\" (UID: \"1b7b4ef5-c90c-459b-8285-d5163a0e9b78\") " pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.303969 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbrc5\" (UniqueName: \"kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5\") pod \"route-controller-manager-6576b87f9c-7llx7\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.324800 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47vnk\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.345718 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.350262 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.355593 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.355783 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.855754679 +0000 UTC m=+139.718756762 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.356044 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.356385 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.856377462 +0000 UTC m=+139.719379545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.370983 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.386623 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.391363 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.410466 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.416746 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.431364 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.449372 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.456771 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.456870 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.956833913 +0000 UTC m=+139.819835996 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.457000 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.457388 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:57.957378714 +0000 UTC m=+139.820380797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.467758 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.504310 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5krpq\" (UniqueName: \"kubernetes.io/projected/49c88b45-1149-4b8a-b390-d817da5ae936-kube-api-access-5krpq\") pod \"dns-default-bfkpt\" (UID: \"49c88b45-1149-4b8a-b390-d817da5ae936\") " pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.531064 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ff0e40-991b-434a-bb8b-ae2bb3f6b559-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hz6tw\" (UID: \"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.549122 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p797\" (UniqueName: \"kubernetes.io/projected/9e28a674-c730-4538-bb06-d7df2c82cc6e-kube-api-access-6p797\") pod \"olm-operator-6b444d44fb-x4zf4\" (UID: \"9e28a674-c730-4538-bb06-d7df2c82cc6e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559061 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.559346 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.059244264 +0000 UTC m=+139.922246347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559478 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nwpx\" (UniqueName: \"kubernetes.io/projected/e4c5991e-05aa-4110-92df-be3bd07d0a32-kube-api-access-4nwpx\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559524 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77pps\" (UniqueName: \"kubernetes.io/projected/98f85866-57fd-4792-889d-1d16aea9d19e-kube-api-access-77pps\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559569 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp4w2\" (UniqueName: \"kubernetes.io/projected/36c61c78-9258-4664-89ed-f1f82a6f9e06-kube-api-access-fp4w2\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559597 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl799\" (UniqueName: \"kubernetes.io/projected/3104e42f-18cf-41de-b704-8b63a4ae2a44-kube-api-access-jl799\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559619 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqgg7\" (UniqueName: \"kubernetes.io/projected/01944eae-d386-4ca1-b6fd-bb27526cc5ff-kube-api-access-gqgg7\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559680 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81b4fd7c-89e0-4795-b372-2d859131320c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559711 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dqcm\" (UniqueName: \"kubernetes.io/projected/5ecd7845-9751-4783-926a-7b6f6344a767-kube-api-access-5dqcm\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: 
\"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559733 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a61272d2-307f-4dcb-b98a-16b015c7c8bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559758 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb28504c-22a1-4b85-87f4-3b5c14cc5785-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559784 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/428a33f4-5469-49d6-a012-49c7656807a7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559807 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-mountpoint-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559837 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559855 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-webhook-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559893 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkb6s\" (UniqueName: \"kubernetes.io/projected/8a397f96-299f-4205-abb9-a261f73b1305-kube-api-access-qkb6s\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559914 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2d947845-a2f2-4d22-be7f-da51b6edac44-tmpfs\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: 
\"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559963 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48c80085-ebde-44ec-a981-c024ce9d008d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.559981 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c5991e-05aa-4110-92df-be3bd07d0a32-config\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560003 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-plugins-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560022 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81767e03-8fa4-4b32-822b-829600cb8114-config\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560054 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560076 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-apiservice-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560130 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9z6l\" (UniqueName: \"kubernetes.io/projected/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-kube-api-access-j9z6l\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560150 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlzwg\" (UniqueName: \"kubernetes.io/projected/2d947845-a2f2-4d22-be7f-da51b6edac44-kube-api-access-dlzwg\") pod \"packageserver-d55dfcdfc-nq68n\" 
(UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560175 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb28504c-22a1-4b85-87f4-3b5c14cc5785-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560204 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgsc6\" (UniqueName: \"kubernetes.io/projected/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-kube-api-access-hgsc6\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560226 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-node-bootstrap-token\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560259 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a397f96-299f-4205-abb9-a261f73b1305-signing-cabundle\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560279 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns6xj\" (UniqueName: \"kubernetes.io/projected/81b4fd7c-89e0-4795-b372-2d859131320c-kube-api-access-ns6xj\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560310 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428a33f4-5469-49d6-a012-49c7656807a7-config\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560335 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-config\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560353 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-service-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: 
\"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560371 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj657\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-kube-api-access-zj657\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560408 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj4wt\" (UniqueName: \"kubernetes.io/projected/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-kube-api-access-qj4wt\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560429 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dhqm\" (UniqueName: \"kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560488 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c80085-ebde-44ec-a981-c024ce9d008d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.560537 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36c61c78-9258-4664-89ed-f1f82a6f9e06-serving-cert\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561239 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561327 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-certs\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561365 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-srv-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: 
\"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561413 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-metrics-certs\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561499 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fb52c9e-bb73-483a-a064-b1bf85f5e901-service-ca-bundle\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561553 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-csi-data-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561644 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzvqq\" (UniqueName: \"kubernetes.io/projected/60cbb4f5-7131-4242-8a30-e9706990636f-kube-api-access-qzvqq\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561677 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/01944eae-d386-4ca1-b6fd-bb27526cc5ff-proxy-tls\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561775 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxqq4\" (UniqueName: \"kubernetes.io/projected/c189484f-e280-4468-b815-f56ef70e07eb-kube-api-access-pxqq4\") pod \"migrator-59844c95c7-lltjv\" (UID: \"c189484f-e280-4468-b815-f56ef70e07eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561835 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-default-certificate\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561856 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561883 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-trusted-ca\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561901 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc9hz\" (UniqueName: \"kubernetes.io/projected/a26f0c32-76e5-4b97-9592-e23abb23fafa-kube-api-access-hc9hz\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.561920 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d98e9979-80de-4f7d-a420-989bc116ab5c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.562087 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-metrics-tls\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.562120 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77bxp\" (UniqueName: \"kubernetes.io/projected/225dcce8-cf38-4809-b2de-41f62273bf8d-kube-api-access-77bxp\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.562144 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-stats-auth\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.562174 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ba5e948-9e8e-432b-9973-e0248bc2b82e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: \"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.562200 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: 
\"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563383 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563558 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-trusted-ca\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563584 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-images\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563671 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-config\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563697 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98e9979-80de-4f7d-a420-989bc116ab5c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563772 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/428a33f4-5469-49d6-a012-49c7656807a7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563819 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-socket-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563838 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpxqh\" (UniqueName: \"kubernetes.io/projected/6ba5e948-9e8e-432b-9973-e0248bc2b82e-kube-api-access-rpxqh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: 
\"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563865 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx22n\" (UniqueName: \"kubernetes.io/projected/d98e9979-80de-4f7d-a420-989bc116ab5c-kube-api-access-fx22n\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563887 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx59x\" (UniqueName: \"kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563923 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563941 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81b4fd7c-89e0-4795-b372-2d859131320c-proxy-tls\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.563983 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hn22\" (UniqueName: \"kubernetes.io/projected/48c80085-ebde-44ec-a981-c024ce9d008d-kube-api-access-2hn22\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.564361 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-client\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.564639 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4c5991e-05aa-4110-92df-be3bd07d0a32-serving-cert\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.564734 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.564924 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81767e03-8fa4-4b32-822b-829600cb8114-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565214 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk27p\" (UniqueName: \"kubernetes.io/projected/81767e03-8fa4-4b32-822b-829600cb8114-kube-api-access-dk27p\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565337 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565430 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a397f96-299f-4205-abb9-a261f73b1305-signing-key\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565637 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a61272d2-307f-4dcb-b98a-16b015c7c8bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565690 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-profile-collector-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.565710 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.065694237 +0000 UTC m=+139.928696520 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565738 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/60cbb4f5-7131-4242-8a30-e9706990636f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565768 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-serving-cert\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565797 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-registration-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565882 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlpzb\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-kube-api-access-zlpzb\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.565956 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3104e42f-18cf-41de-b704-8b63a4ae2a44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.566024 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-metrics-tls\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.566156 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a61272d2-307f-4dcb-b98a-16b015c7c8bd-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 
21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.566838 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn7mj\" (UniqueName: \"kubernetes.io/projected/0fb52c9e-bb73-483a-a064-b1bf85f5e901-kube-api-access-dn7mj\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.570558 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ecd7845-9751-4783-926a-7b6f6344a767-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: \"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.570620 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98f85866-57fd-4792-889d-1d16aea9d19e-cert\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.613803 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.657727 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-bfkpt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672092 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672704 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672731 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-plugins-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672748 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81767e03-8fa4-4b32-822b-829600cb8114-config\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672764 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-apiservice-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672785 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9z6l\" (UniqueName: \"kubernetes.io/projected/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-kube-api-access-j9z6l\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672800 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlzwg\" (UniqueName: \"kubernetes.io/projected/2d947845-a2f2-4d22-be7f-da51b6edac44-kube-api-access-dlzwg\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672816 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb28504c-22a1-4b85-87f4-3b5c14cc5785-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672842 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgsc6\" (UniqueName: \"kubernetes.io/projected/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-kube-api-access-hgsc6\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672857 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-node-bootstrap-token\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672874 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a397f96-299f-4205-abb9-a261f73b1305-signing-cabundle\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672888 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns6xj\" (UniqueName: \"kubernetes.io/projected/81b4fd7c-89e0-4795-b372-2d859131320c-kube-api-access-ns6xj\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672914 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428a33f4-5469-49d6-a012-49c7656807a7-config\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672935 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-config\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672952 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-service-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672969 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj657\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-kube-api-access-zj657\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.672988 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dhqm\" (UniqueName: \"kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673005 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj4wt\" (UniqueName: \"kubernetes.io/projected/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-kube-api-access-qj4wt\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673023 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c80085-ebde-44ec-a981-c024ce9d008d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673053 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36c61c78-9258-4664-89ed-f1f82a6f9e06-serving-cert\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673070 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673088 4912 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-srv-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673121 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-metrics-certs\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673142 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-certs\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673163 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fb52c9e-bb73-483a-a064-b1bf85f5e901-service-ca-bundle\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673182 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-csi-data-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673219 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/01944eae-d386-4ca1-b6fd-bb27526cc5ff-proxy-tls\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673244 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzvqq\" (UniqueName: \"kubernetes.io/projected/60cbb4f5-7131-4242-8a30-e9706990636f-kube-api-access-qzvqq\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673268 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxqq4\" (UniqueName: \"kubernetes.io/projected/c189484f-e280-4468-b815-f56ef70e07eb-kube-api-access-pxqq4\") pod \"migrator-59844c95c7-lltjv\" (UID: \"c189484f-e280-4468-b815-f56ef70e07eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673290 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-default-certificate\") pod \"router-default-5444994796-b77mr\" (UID: 
\"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673310 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673332 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-trusted-ca\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673352 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc9hz\" (UniqueName: \"kubernetes.io/projected/a26f0c32-76e5-4b97-9592-e23abb23fafa-kube-api-access-hc9hz\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673369 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d98e9979-80de-4f7d-a420-989bc116ab5c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673387 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77bxp\" (UniqueName: \"kubernetes.io/projected/225dcce8-cf38-4809-b2de-41f62273bf8d-kube-api-access-77bxp\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673403 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-stats-auth\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673420 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-metrics-tls\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673440 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ba5e948-9e8e-432b-9973-e0248bc2b82e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: \"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc 
kubenswrapper[4912]: I1208 21:20:57.673471 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673489 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673507 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-trusted-ca\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673526 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-images\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673543 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98e9979-80de-4f7d-a420-989bc116ab5c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673562 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-config\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673578 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-socket-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673596 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpxqh\" (UniqueName: \"kubernetes.io/projected/6ba5e948-9e8e-432b-9973-e0248bc2b82e-kube-api-access-rpxqh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: \"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673627 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/428a33f4-5469-49d6-a012-49c7656807a7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673650 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx22n\" (UniqueName: \"kubernetes.io/projected/d98e9979-80de-4f7d-a420-989bc116ab5c-kube-api-access-fx22n\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673671 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx59x\" (UniqueName: \"kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673689 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hn22\" (UniqueName: \"kubernetes.io/projected/48c80085-ebde-44ec-a981-c024ce9d008d-kube-api-access-2hn22\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673705 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673722 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81b4fd7c-89e0-4795-b372-2d859131320c-proxy-tls\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673740 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4c5991e-05aa-4110-92df-be3bd07d0a32-serving-cert\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673766 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-client\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673781 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/81767e03-8fa4-4b32-822b-829600cb8114-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673795 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk27p\" (UniqueName: \"kubernetes.io/projected/81767e03-8fa4-4b32-822b-829600cb8114-kube-api-access-dk27p\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673812 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673839 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a397f96-299f-4205-abb9-a261f73b1305-signing-key\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673859 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a61272d2-307f-4dcb-b98a-16b015c7c8bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673875 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-profile-collector-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673894 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/60cbb4f5-7131-4242-8a30-e9706990636f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673913 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-serving-cert\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673932 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-registration-dir\") pod \"csi-hostpathplugin-7lvnt\" 
(UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673950 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlpzb\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-kube-api-access-zlpzb\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673969 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3104e42f-18cf-41de-b704-8b63a4ae2a44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.673985 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-metrics-tls\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674005 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a61272d2-307f-4dcb-b98a-16b015c7c8bd-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674063 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn7mj\" (UniqueName: \"kubernetes.io/projected/0fb52c9e-bb73-483a-a064-b1bf85f5e901-kube-api-access-dn7mj\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674097 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ecd7845-9751-4783-926a-7b6f6344a767-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: \"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674113 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98f85866-57fd-4792-889d-1d16aea9d19e-cert\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674130 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp4w2\" (UniqueName: \"kubernetes.io/projected/36c61c78-9258-4664-89ed-f1f82a6f9e06-kube-api-access-fp4w2\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc 
kubenswrapper[4912]: I1208 21:20:57.674149 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl799\" (UniqueName: \"kubernetes.io/projected/3104e42f-18cf-41de-b704-8b63a4ae2a44-kube-api-access-jl799\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674165 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nwpx\" (UniqueName: \"kubernetes.io/projected/e4c5991e-05aa-4110-92df-be3bd07d0a32-kube-api-access-4nwpx\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674181 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77pps\" (UniqueName: \"kubernetes.io/projected/98f85866-57fd-4792-889d-1d16aea9d19e-kube-api-access-77pps\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674199 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81b4fd7c-89e0-4795-b372-2d859131320c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674216 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqgg7\" (UniqueName: \"kubernetes.io/projected/01944eae-d386-4ca1-b6fd-bb27526cc5ff-kube-api-access-gqgg7\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674233 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a61272d2-307f-4dcb-b98a-16b015c7c8bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674251 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dqcm\" (UniqueName: \"kubernetes.io/projected/5ecd7845-9751-4783-926a-7b6f6344a767-kube-api-access-5dqcm\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: \"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674268 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb28504c-22a1-4b85-87f4-3b5c14cc5785-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 
21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674284 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/428a33f4-5469-49d6-a012-49c7656807a7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674301 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-mountpoint-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674325 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674341 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-webhook-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674358 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkb6s\" (UniqueName: \"kubernetes.io/projected/8a397f96-299f-4205-abb9-a261f73b1305-kube-api-access-qkb6s\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674375 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2d947845-a2f2-4d22-be7f-da51b6edac44-tmpfs\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674393 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48c80085-ebde-44ec-a981-c024ce9d008d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.674411 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c5991e-05aa-4110-92df-be3bd07d0a32-config\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.675696 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-config\") pod 
\"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.675848 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.175823897 +0000 UTC m=+140.038826040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.676212 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-socket-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.679726 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-plugins-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.680473 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81767e03-8fa4-4b32-822b-829600cb8114-config\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.682960 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fb52c9e-bb73-483a-a064-b1bf85f5e901-service-ca-bundle\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.685174 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-csi-data-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.685826 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c5991e-05aa-4110-92df-be3bd07d0a32-config\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.685992 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d98e9979-80de-4f7d-a420-989bc116ab5c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.686701 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.687371 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c80085-ebde-44ec-a981-c024ce9d008d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.687722 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb28504c-22a1-4b85-87f4-3b5c14cc5785-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.689349 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.691699 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.692721 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.693744 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-registration-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.693989 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-mountpoint-dir\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " 
pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.695455 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-config\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.695987 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a397f96-299f-4205-abb9-a261f73b1305-signing-cabundle\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.696078 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2d947845-a2f2-4d22-be7f-da51b6edac44-tmpfs\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.696125 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-service-ca\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.696392 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.696785 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428a33f4-5469-49d6-a012-49c7656807a7-config\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.697602 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a61272d2-307f-4dcb-b98a-16b015c7c8bd-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.697483 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-trusted-ca\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.698667 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/81b4fd7c-89e0-4795-b372-2d859131320c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.699582 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-node-bootstrap-token\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.702614 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36c61c78-9258-4664-89ed-f1f82a6f9e06-trusted-ca\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.702745 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/01944eae-d386-4ca1-b6fd-bb27526cc5ff-images\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.703307 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-metrics-tls\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.703929 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-srv-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.704784 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48c80085-ebde-44ec-a981-c024ce9d008d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.706688 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb28504c-22a1-4b85-87f4-3b5c14cc5785-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.707326 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a397f96-299f-4205-abb9-a261f73b1305-signing-key\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.707345 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36c61c78-9258-4664-89ed-f1f82a6f9e06-serving-cert\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.707646 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a61272d2-307f-4dcb-b98a-16b015c7c8bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.707851 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpxqh\" (UniqueName: \"kubernetes.io/projected/6ba5e948-9e8e-432b-9973-e0248bc2b82e-kube-api-access-rpxqh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: \"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.707859 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81b4fd7c-89e0-4795-b372-2d859131320c-proxy-tls\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.708308 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-etcd-client\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.708450 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/428a33f4-5469-49d6-a012-49c7656807a7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.708867 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98e9979-80de-4f7d-a420-989bc116ab5c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.708893 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-default-certificate\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709444 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/225dcce8-cf38-4809-b2de-41f62273bf8d-profile-collector-cert\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709518 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ba5e948-9e8e-432b-9973-e0248bc2b82e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bx7nc\" (UID: \"6ba5e948-9e8e-432b-9973-e0248bc2b82e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709527 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a26f0c32-76e5-4b97-9592-e23abb23fafa-certs\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709646 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ecd7845-9751-4783-926a-7b6f6344a767-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: \"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709928 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-metrics-tls\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.709991 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/60cbb4f5-7131-4242-8a30-e9706990636f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.710056 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-webhook-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.710371 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.710405 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-stats-auth\") pod \"router-default-5444994796-b77mr\" (UID: 
\"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.710536 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2d947845-a2f2-4d22-be7f-da51b6edac44-apiservice-cert\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.710848 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98f85866-57fd-4792-889d-1d16aea9d19e-cert\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.711456 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-serving-cert\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.712337 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0fb52c9e-bb73-483a-a064-b1bf85f5e901-metrics-certs\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.712897 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/01944eae-d386-4ca1-b6fd-bb27526cc5ff-proxy-tls\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.715784 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4c5991e-05aa-4110-92df-be3bd07d0a32-serving-cert\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.718200 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81767e03-8fa4-4b32-822b-829600cb8114-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.718607 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3104e42f-18cf-41de-b704-8b63a4ae2a44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.745771 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9z6l\" (UniqueName: 
\"kubernetes.io/projected/6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae-kube-api-access-j9z6l\") pod \"etcd-operator-b45778765-96qt5\" (UID: \"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae\") " pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.767292 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlzwg\" (UniqueName: \"kubernetes.io/projected/2d947845-a2f2-4d22-be7f-da51b6edac44-kube-api-access-dlzwg\") pod \"packageserver-d55dfcdfc-nq68n\" (UID: \"2d947845-a2f2-4d22-be7f-da51b6edac44\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.776104 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.776717 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.276701406 +0000 UTC m=+140.139703489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.785822 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.792694 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.797006 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxqq4\" (UniqueName: \"kubernetes.io/projected/c189484f-e280-4468-b815-f56ef70e07eb-kube-api-access-pxqq4\") pod \"migrator-59844c95c7-lltjv\" (UID: \"c189484f-e280-4468-b815-f56ef70e07eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.809087 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx22n\" (UniqueName: \"kubernetes.io/projected/d98e9979-80de-4f7d-a420-989bc116ab5c-kube-api-access-fx22n\") pod \"openshift-controller-manager-operator-756b6f6bc6-tt47m\" (UID: \"d98e9979-80de-4f7d-a420-989bc116ab5c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.819802 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.835597 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzvqq\" (UniqueName: \"kubernetes.io/projected/60cbb4f5-7131-4242-8a30-e9706990636f-kube-api-access-qzvqq\") pod \"multus-admission-controller-857f4d67dd-gdkk9\" (UID: \"60cbb4f5-7131-4242-8a30-e9706990636f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.853045 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqgg7\" (UniqueName: \"kubernetes.io/projected/01944eae-d386-4ca1-b6fd-bb27526cc5ff-kube-api-access-gqgg7\") pod \"machine-config-operator-74547568cd-dtf8c\" (UID: \"01944eae-d386-4ca1-b6fd-bb27526cc5ff\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.854959 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:20:57 crc kubenswrapper[4912]: W1208 21:20:57.863259 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1627b83a_6756_4797_b857_7495c262d53c.slice/crio-9d468ca7f933ce9d7be35b11ac2d8a9d2aa89d76d2cf70901ced70a91e67261b WatchSource:0}: Error finding container 9d468ca7f933ce9d7be35b11ac2d8a9d2aa89d76d2cf70901ced70a91e67261b: Status 404 returned error can't find the container with id 9d468ca7f933ce9d7be35b11ac2d8a9d2aa89d76d2cf70901ced70a91e67261b Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.873489 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a61272d2-307f-4dcb-b98a-16b015c7c8bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zwxn4\" (UID: \"a61272d2-307f-4dcb-b98a-16b015c7c8bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.879479 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") 
pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.880268 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.380246581 +0000 UTC m=+140.243248654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.887374 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.891772 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj657\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-kube-api-access-zj657\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.904444 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.906439 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-s2plb"] Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.907645 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgsc6\" (UniqueName: \"kubernetes.io/projected/2330bc56-ffb4-4f12-8fd3-0ff9d95307c6-kube-api-access-hgsc6\") pod \"csi-hostpathplugin-7lvnt\" (UID: \"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6\") " pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.922834 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.946139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.954852 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj4wt\" (UniqueName: \"kubernetes.io/projected/2573e7ee-4cff-4144-97d8-b19c2c5d1f42-kube-api-access-qj4wt\") pod \"dns-operator-744455d44c-tmgx2\" (UID: \"2573e7ee-4cff-4144-97d8-b19c2c5d1f42\") " pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.967019 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dhqm\" (UniqueName: \"kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm\") pod \"collect-profiles-29420475-4g5jh\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:57 crc kubenswrapper[4912]: I1208 21:20:57.990582 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:57 crc kubenswrapper[4912]: E1208 21:20:57.991229 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.491161978 +0000 UTC m=+140.354164061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.011250 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.011827 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.014531 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77bxp\" (UniqueName: \"kubernetes.io/projected/225dcce8-cf38-4809-b2de-41f62273bf8d-kube-api-access-77bxp\") pod \"catalog-operator-68c6474976-fq6s8\" (UID: \"225dcce8-cf38-4809-b2de-41f62273bf8d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.029971 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk27p\" (UniqueName: \"kubernetes.io/projected/81767e03-8fa4-4b32-822b-829600cb8114-kube-api-access-dk27p\") pod \"openshift-apiserver-operator-796bbdcf4f-msd8v\" (UID: \"81767e03-8fa4-4b32-822b-829600cb8114\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.038801 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.045349 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.046078 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dqcm\" (UniqueName: \"kubernetes.io/projected/5ecd7845-9751-4783-926a-7b6f6344a767-kube-api-access-5dqcm\") pod \"package-server-manager-789f6589d5-ct9x7\" (UID: \"5ecd7845-9751-4783-926a-7b6f6344a767\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.047553 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkb6s\" (UniqueName: \"kubernetes.io/projected/8a397f96-299f-4205-abb9-a261f73b1305-kube-api-access-qkb6s\") pod \"service-ca-9c57cc56f-x7c67\" (UID: \"8a397f96-299f-4205-abb9-a261f73b1305\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.055646 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.078282 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.081909 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl799\" (UniqueName: \"kubernetes.io/projected/3104e42f-18cf-41de-b704-8b63a4ae2a44-kube-api-access-jl799\") pod \"cluster-samples-operator-665b6dd947-nfncz\" (UID: \"3104e42f-18cf-41de-b704-8b63a4ae2a44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.093177 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.094553 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.594527489 +0000 UTC m=+140.457529582 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.103772 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/428a33f4-5469-49d6-a012-49c7656807a7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dgql6\" (UID: \"428a33f4-5469-49d6-a012-49c7656807a7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.109875 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns6xj\" (UniqueName: \"kubernetes.io/projected/81b4fd7c-89e0-4795-b372-2d859131320c-kube-api-access-ns6xj\") pod \"machine-config-controller-84d6567774-kxgdg\" (UID: \"81b4fd7c-89e0-4795-b372-2d859131320c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.115259 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.131435 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlpzb\" (UniqueName: \"kubernetes.io/projected/eb28504c-22a1-4b85-87f4-3b5c14cc5785-kube-api-access-zlpzb\") pod \"cluster-image-registry-operator-dc59b4c8b-hfs55\" (UID: \"eb28504c-22a1-4b85-87f4-3b5c14cc5785\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.161311 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.173578 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.186675 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.194272 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.195005 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.195443 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.695429949 +0000 UTC m=+140.558432032 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.253522 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.266635 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn7mj\" (UniqueName: \"kubernetes.io/projected/0fb52c9e-bb73-483a-a064-b1bf85f5e901-kube-api-access-dn7mj\") pod \"router-default-5444994796-b77mr\" (UID: \"0fb52c9e-bb73-483a-a064-b1bf85f5e901\") " pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.267582 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx59x\" (UniqueName: \"kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x\") pod \"marketplace-operator-79b997595-mw698\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.270386 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.276581 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc9hz\" (UniqueName: \"kubernetes.io/projected/a26f0c32-76e5-4b97-9592-e23abb23fafa-kube-api-access-hc9hz\") pod \"machine-config-server-6qbtj\" (UID: \"a26f0c32-76e5-4b97-9592-e23abb23fafa\") " pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.286812 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nwpx\" (UniqueName: \"kubernetes.io/projected/e4c5991e-05aa-4110-92df-be3bd07d0a32-kube-api-access-4nwpx\") pod \"service-ca-operator-777779d784-qxcg4\" (UID: \"e4c5991e-05aa-4110-92df-be3bd07d0a32\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.286988 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp4w2\" (UniqueName: \"kubernetes.io/projected/36c61c78-9258-4664-89ed-f1f82a6f9e06-kube-api-access-fp4w2\") pod \"console-operator-58897d9998-2g5dn\" (UID: \"36c61c78-9258-4664-89ed-f1f82a6f9e06\") " pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.297508 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.297928 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77pps\" (UniqueName: \"kubernetes.io/projected/98f85866-57fd-4792-889d-1d16aea9d19e-kube-api-access-77pps\") pod \"ingress-canary-mt2rf\" (UID: \"98f85866-57fd-4792-889d-1d16aea9d19e\") " pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.297967 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.797950842 +0000 UTC m=+140.660952935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.306134 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.315897 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/450b96f9-f5a6-485f-a3fd-ae5a408b2b8d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jn7k8\" (UID: \"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.324026 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6qbtj" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.328611 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.338796 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4"] Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.351576 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hn22\" (UniqueName: \"kubernetes.io/projected/48c80085-ebde-44ec-a981-c024ce9d008d-kube-api-access-2hn22\") pod \"kube-storage-version-migrator-operator-b67b599dd-wjcc5\" (UID: \"48c80085-ebde-44ec-a981-c024ce9d008d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.355015 4912 generic.go:334] "Generic (PLEG): container finished" podID="58a44c46-96a6-4c80-b2b8-2bf7f66c3259" containerID="37fd9a4186d23f8d1dc79b8a28f9f626e26df29ca2344c00bb497262c41b6b88" exitCode=0 Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.355179 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" event={"ID":"58a44c46-96a6-4c80-b2b8-2bf7f66c3259","Type":"ContainerDied","Data":"37fd9a4186d23f8d1dc79b8a28f9f626e26df29ca2344c00bb497262c41b6b88"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.360814 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.368406 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.385370 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8rkpr" event={"ID":"b17a753b-7754-47a9-8432-0f1fab0fb3be","Type":"ContainerStarted","Data":"3e054f6e90f5d69f6b0feddff4e436bd34750d04c241f242b5bf683208a25cde"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.385417 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8rkpr" event={"ID":"b17a753b-7754-47a9-8432-0f1fab0fb3be","Type":"ContainerStarted","Data":"65615966e429414519792db22193fe4390c3e1509bdd440543dc6e85f60fbb06"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.388383 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.391155 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.391204 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.399559 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.399912 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:58.899901074 +0000 UTC m=+140.762903157 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.406090 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.435366 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.465105 4912 generic.go:334] "Generic (PLEG): container finished" podID="b20ccfdc-c499-40e4-9a0b-bebc13394494" containerID="4eb1f175afeb9f99e2fec600f59139479d513f2c4dbd011f9119dfde9479f1d0" exitCode=0 Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476236 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" event={"ID":"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21","Type":"ContainerStarted","Data":"b0519213ccd8bac7314cf5f9de2d69da7aca13b8460647b58e22522bac3cd839"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476290 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-46knq" event={"ID":"1627b83a-6756-4797-b857-7495c262d53c","Type":"ContainerStarted","Data":"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476301 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-46knq" event={"ID":"1627b83a-6756-4797-b857-7495c262d53c","Type":"ContainerStarted","Data":"9d468ca7f933ce9d7be35b11ac2d8a9d2aa89d76d2cf70901ced70a91e67261b"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476311 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" event={"ID":"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d","Type":"ContainerStarted","Data":"d01877e01e930f971e5ef8e15e939899479e8738b4e84bdc70808a4986f5175f"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476394 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" event={"ID":"294c26d9-93fd-4ca1-8744-0bb56db49b94","Type":"ContainerStarted","Data":"f7611c412990b2e17a08ac14fa5a824d27def543383dc245e3e462cfd964b4a6"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476406 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" event={"ID":"294c26d9-93fd-4ca1-8744-0bb56db49b94","Type":"ContainerStarted","Data":"23efbed795aa71f64fea0dcfa28d669a10ae5f3387e68725ff4f984ff79a6aff"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.476415 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" event={"ID":"b20ccfdc-c499-40e4-9a0b-bebc13394494","Type":"ContainerDied","Data":"4eb1f175afeb9f99e2fec600f59139479d513f2c4dbd011f9119dfde9479f1d0"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.484840 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" event={"ID":"5c9f4edc-e900-4571-8d81-4f253bfe8ea1","Type":"ContainerStarted","Data":"13787ba63ac992d46e9e5fa388cbd69b5ead157ad2a512c4a27b8f0da27f4585"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.484900 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" event={"ID":"5c9f4edc-e900-4571-8d81-4f253bfe8ea1","Type":"ContainerStarted","Data":"4d5142bc200fcc2090dc804d256a32a530db114b0ffc1f971b2a3349edcc7295"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.488276 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" event={"ID":"c5248f2d-ec3c-436a-9c01-5bef4382ca4f","Type":"ContainerStarted","Data":"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.488352 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" event={"ID":"c5248f2d-ec3c-436a-9c01-5bef4382ca4f","Type":"ContainerStarted","Data":"df0974cbb551614afc0ec58452916b4fb801900ad4f92898bed555db82e1404d"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.488374 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.491557 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" event={"ID":"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b","Type":"ContainerStarted","Data":"2c93304a07b437ad4518ff159b33300b36a27cc3727bc3e403d78a76e7950668"} Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.492567 4912 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-sm5lg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.492616 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.500837 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.504237 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" event={"ID":"1b7b4ef5-c90c-459b-8285-d5163a0e9b78","Type":"ContainerStarted","Data":"6bda92e04b106943238c8d2953256fa19d22d47dc334973d43e221d1ecdbe64d"} Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.504845 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.004819237 +0000 UTC m=+140.867821320 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.521569 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc"] Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.522880 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw"] Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.542724 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.567271 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mt2rf" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.610173 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.610663 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.110641989 +0000 UTC m=+140.973644072 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.630846 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.642964 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-96qt5"] Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.700280 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bfkpt"] Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.716884 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.717505 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.217465231 +0000 UTC m=+141.080467314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.821574 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.822056 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.322025446 +0000 UTC m=+141.185027529 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.913024 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-46knq" podStartSLOduration=121.91300167200001 podStartE2EDuration="2m1.913001672s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:58.912611594 +0000 UTC m=+140.775613677" watchObservedRunningTime="2025-12-08 21:20:58.913001672 +0000 UTC m=+140.776003755" Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.928485 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.928655 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.428632904 +0000 UTC m=+141.291634987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.929259 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:20:58 crc kubenswrapper[4912]: E1208 21:20:58.931445 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.431429552 +0000 UTC m=+141.294431635 (durationBeforeRetry 500ms). 
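
The pod_startup_latency_tracker line above is straightforward arithmetic: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp (21:20:58.913001672 - 21:18:57 = 121.913001672s), and because no image pull was recorded (both pull timestamps are the zero time), the SLO duration equals the end-to-end duration. A quick check in Go, parsing the timestamps exactly as they appear in the log:

package main

import (
	"fmt"
	"time"
)

// Reproduces the podStartSLOduration arithmetic for console-f9d7485db-46knq:
// watchObservedRunningTime minus podCreationTimestamp. The layout below is
// Go's default time.Time String() format, which is what the log prints.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-12-08 21:18:57 +0000 UTC")
	observed, _ := time.Parse(layout, "2025-12-08 21:20:58.913001672 +0000 UTC")
	fmt.Println(observed.Sub(created).Seconds()) // 121.913001672
}
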
Dec 08 21:20:58 crc kubenswrapper[4912]: I1208 21:20:58.998542 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.030565 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.031521 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.531475964 +0000 UTC m=+141.394478057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.132417 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.132892 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.632869404 +0000 UTC m=+141.495871487 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.220939 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gdkk9"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.235952 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.236144 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.736115483 +0000 UTC m=+141.599117576 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.236532 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.236908 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.736897089 +0000 UTC m=+141.599899182 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.251330 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.253190 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.253795 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.255407 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.266927 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.337750 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.337968 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.837944812 +0000 UTC m=+141.700946895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.338625 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.339155 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.839143447 +0000 UTC m=+141.702145530 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
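
nestedpendingoperations is the kubelet's per-volume serializer: at most one operation may be in flight for a given {volumeName, podName} key (an empty podName, as in the MountDevice entries, makes the key device-wide), and while a failed operation's backoff window is open, every new attempt is refused with the "No retries permitted until" error seen throughout this log. A toy version of that admission check, not the real implementation:

package main

import (
	"fmt"
	"sync"
	"time"
)

// pendingOps sketches the nestedpendingoperations table: one entry per
// {volumeName, podName} key. tryStart admits an attempt, and records a
// retry-not-before time as if the attempt had just failed; any further
// attempt inside that window is refused.
type pendingOps struct {
	mu    sync.Mutex
	until map[string]time.Time
}

func (p *pendingOps) tryStart(key string, backoff time.Duration) error {
	p.mu.Lock()
	defer p.mu.Unlock()
	if t, ok := p.until[key]; ok && time.Now().Before(t) {
		return fmt.Errorf("no retries permitted until %s", t.Format(time.RFC3339Nano))
	}
	p.until[key] = time.Now().Add(backoff)
	return nil
}

func main() {
	ops := &pendingOps{until: map[string]time.Time{}}
	key := "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/8f668bae-612b-4b75-9490-919e737c6a3b"
	fmt.Println(ops.tryStart(key, 500*time.Millisecond)) // <nil>: admitted, backoff window opened
	fmt.Println(ops.tryStart(key, 500*time.Millisecond)) // refused while the window is open
}

This is why the reconciler can keep logging "operationExecutor.UnmountVolume started" every hundred milliseconds or so while the underlying operation only actually retries once per backoff window.
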
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.346458 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-d848b" podStartSLOduration=122.346430757 podStartE2EDuration="2m2.346430757s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:59.345062199 +0000 UTC m=+141.208064292" watchObservedRunningTime="2025-12-08 21:20:59.346430757 +0000 UTC m=+141.209432840"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.436330 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-8rkpr" podStartSLOduration=122.43629074 podStartE2EDuration="2m2.43629074s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:59.393272103 +0000 UTC m=+141.256274186" watchObservedRunningTime="2025-12-08 21:20:59.43629074 +0000 UTC m=+141.299292843"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.441649 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.442176 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:20:59.94215381 +0000 UTC m=+141.805155893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.575643 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.576046 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.07602057 +0000 UTC m=+141.939022653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.585589 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tmgx2"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.619521 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" event={"ID":"1259f14f-f93a-4765-9f93-ac4af158951e","Type":"ContainerStarted","Data":"81b214e583fffc7bd8adc0851554024c0fdf76da36e3a00c328609c3845ede76"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.626134 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.644576 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7lvnt"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.650427 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg"]
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.657404 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b7b4ef5_c90c_459b_8285_d5163a0e9b78.slice/crio-conmon-a16585e7e460a1ce8959886c4c16e7911c79ce68dd44bcdcdb45b620cb5be7f5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b7b4ef5_c90c_459b_8285_d5163a0e9b78.slice/crio-a16585e7e460a1ce8959886c4c16e7911c79ce68dd44bcdcdb45b620cb5be7f5.scope\": RecentStats: unable to find data in memory cache]"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.662414 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2g5dn"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.663992 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6qbtj" event={"ID":"a26f0c32-76e5-4b97-9592-e23abb23fafa","Type":"ContainerStarted","Data":"40ffd208be7f062130b079ba8cdb4895ce66251d92cf8023287c29790ab4aa25"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.664061 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6qbtj" event={"ID":"a26f0c32-76e5-4b97-9592-e23abb23fafa","Type":"ContainerStarted","Data":"41a396c91678eff7a58626aa94ec8d80d6fc1df47a36e73a9204507820f8cee9"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.664208 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" podStartSLOduration=122.664180438 podStartE2EDuration="2m2.664180438s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:59.59202387 +0000 UTC m=+141.455025973" watchObservedRunningTime="2025-12-08 21:20:59.664180438 +0000 UTC m=+141.527182521"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.667077 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.679528 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.680441 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.180423673 +0000 UTC m=+142.043425756 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.684110 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.687887 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.688761 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" event={"ID":"58a44c46-96a6-4c80-b2b8-2bf7f66c3259","Type":"ContainerStarted","Data":"cd0ada38ff94ab19e28a11b8cf9ee652dc72f5021e610bab23478a6580d3bdb4"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.689586 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.718425 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" event={"ID":"c189484f-e280-4468-b815-f56ef70e07eb","Type":"ContainerStarted","Data":"6e9f5820ff91e834a770cfbcc31d4b51153c4c9d24dd13482cc75b44ab4b6a73"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.758123 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-kxhsq" podStartSLOduration=123.758023682 podStartE2EDuration="2m3.758023682s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:20:59.749471196 +0000 UTC m=+141.612473279" watchObservedRunningTime="2025-12-08 21:20:59.758023682 +0000 UTC m=+141.621025785"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.787004 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.789573 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.289554082 +0000 UTC m=+142.152556165 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: W1208 21:20:59.813613 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a397f96_299f_4205_abb9_a261f73b1305.slice/crio-4eca51242a4ead50bc0e67fcfbb9ae4cd545a6d82766c3ce7e03acb6e3826561 WatchSource:0}: Error finding container 4eca51242a4ead50bc0e67fcfbb9ae4cd545a6d82766c3ce7e03acb6e3826561: Status 404 returned error can't find the container with id 4eca51242a4ead50bc0e67fcfbb9ae4cd545a6d82766c3ce7e03acb6e3826561
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.816963 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7c67"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817064 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817085 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" event={"ID":"60cbb4f5-7131-4242-8a30-e9706990636f","Type":"ContainerStarted","Data":"25c063402ec25f436b3d387976126fc8e39979d2041709016dfdab3dba2a619f"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817116 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" event={"ID":"2d947845-a2f2-4d22-be7f-da51b6edac44","Type":"ContainerStarted","Data":"9ffd74b1dff3b1675e73afd699c065a48ccba7c49450d8c99c8eac5133d4e10f"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817139 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817157 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817176 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" event={"ID":"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21","Type":"ContainerStarted","Data":"1bcbae8b0dcd7437c23da7759fd09c13dcc620a750190682ba6496b449b01dd4"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.817190 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mt2rf"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.844708 4912 generic.go:334] "Generic (PLEG): container finished" podID="1b7b4ef5-c90c-459b-8285-d5163a0e9b78" containerID="a16585e7e460a1ce8959886c4c16e7911c79ce68dd44bcdcdb45b620cb5be7f5" exitCode=0
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.844836 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" event={"ID":"1b7b4ef5-c90c-459b-8285-d5163a0e9b78","Type":"ContainerDied","Data":"a16585e7e460a1ce8959886c4c16e7911c79ce68dd44bcdcdb45b620cb5be7f5"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.855613 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" event={"ID":"d98e9979-80de-4f7d-a420-989bc116ab5c","Type":"ContainerStarted","Data":"332da4b1866fea81e4f224557303c06772f118a65a0812df3315c05b35af030a"}
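
The two cadvisor warnings in this stretch ("Partial failure issuing cadvisor.ContainerInfoV2 ... RecentStats: unable to find data in memory cache" and "Failed to process watch event ... Status 404") are both races against containers created moments earlier: the stats cache has no samples for the new cgroup yet, and by the time the cgroup watch event is handled the container lookup can miss. Both are transient during a burst of container starts like this one. A sketch of the cache-miss half, using an invented recentStats type rather than cadvisor's own:

package main

import (
	"errors"
	"fmt"
)

// recentStats sketches the in-memory cache cadvisor consults. A container
// created a moment ago has no samples yet, which is what "RecentStats:
// unable to find data in memory cache" reports.
type recentStats struct {
	samples map[string][]int // cgroup path -> collected samples
}

var errNoData = errors.New("unable to find data in memory cache")

func (c *recentStats) latest(cgroup string) (int, error) {
	s, ok := c.samples[cgroup]
	if !ok || len(s) == 0 {
		return 0, errNoData
	}
	return s[len(s)-1], nil
}

func main() {
	cache := &recentStats{samples: map[string][]int{}}
	_, err := cache.latest("/kubepods.slice/kubepods-burstable.slice/some-new.scope")
	fmt.Println(err) // transient: housekeeping populates the cache shortly after start
}
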
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.862258 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.889457 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" event={"ID":"c4b8e2bd-4e87-4797-b33c-7bb36c40d59d","Type":"ContainerStarted","Data":"551954325ff42742b356e9f5b9f11d14852c50cd1e6a19dbb4814f714ffac098"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.892465 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.892660 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.392639377 +0000 UTC m=+142.255641460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.892868 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.893295 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.393285111 +0000 UTC m=+142.256287194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.898484 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b77mr" event={"ID":"0fb52c9e-bb73-483a-a064-b1bf85f5e901","Type":"ContainerStarted","Data":"b74208ac63a67ceecb130d7a66eaa01b3ac71b924378148e6a17c02b964c2ae7"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.902410 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" event={"ID":"6ba5e948-9e8e-432b-9973-e0248bc2b82e","Type":"ContainerStarted","Data":"97cdde15aa38516ba0538b2e7d5a38bbb47da7e5bd2e0ab4b567d74f6f01d2e5"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.933953 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" event={"ID":"9e28a674-c730-4538-bb06-d7df2c82cc6e","Type":"ContainerStarted","Data":"33dc53e60bc35315905b906c01da3c648837f6de372fc40d6b1186d815c190ce"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.934056 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" event={"ID":"9e28a674-c730-4538-bb06-d7df2c82cc6e","Type":"ContainerStarted","Data":"0ba0fb228c91625b3ef91da59dcbbf97dd3001333041caa97d898fa8561af898"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.934309 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.944055 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" event={"ID":"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae","Type":"ContainerStarted","Data":"c7bffcfbdc6aa4dc89a4e348b8325fecf4426d005bd21d2368563d093ebbe698"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.948889 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" event={"ID":"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b","Type":"ContainerStarted","Data":"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.950755 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.960411 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" event={"ID":"a61272d2-307f-4dcb-b98a-16b015c7c8bd","Type":"ContainerStarted","Data":"0292940ef5679ae8e4f248db077770689f7b21d211eb2c5d4217df6f14d0d9c1"}
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.989026 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7"]
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.989369 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4"
Dec 08 21:20:59 crc kubenswrapper[4912]: I1208 21:20:59.993771 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:20:59 crc kubenswrapper[4912]: E1208 21:20:59.994892 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.494870295 +0000 UTC m=+142.357872378 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.010507 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" event={"ID":"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559","Type":"ContainerStarted","Data":"f102d938d428db3e5864f4048a79f883be90385ab5cc621c423d3f2601a3f343"}
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.032245 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bfkpt" event={"ID":"49c88b45-1149-4b8a-b390-d817da5ae936","Type":"ContainerStarted","Data":"3ac0832c3d3c9baa74107c682f5e6da03cc13853369a4599d02c10a210fc5648"}
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.038477 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4"]
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.069685 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8"]
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.084969 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" event={"ID":"01944eae-d386-4ca1-b6fd-bb27526cc5ff","Type":"ContainerStarted","Data":"faa2121f371f1753f66d14061dfca7950b0fa17faa79331417b012ae4b2144be"}
Dec 08 21:21:00 crc kubenswrapper[4912]: W1208 21:21:00.086024 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4c5991e_05aa_4110_92df_be3bd07d0a32.slice/crio-d4167f2d226516f32b8cd65c925b1d37e6e9d48c2555cd63ac00ef56b2a40c52 WatchSource:0}: Error finding container d4167f2d226516f32b8cd65c925b1d37e6e9d48c2555cd63ac00ef56b2a40c52: Status 404 returned error can't find the container with id d4167f2d226516f32b8cd65c925b1d37e6e9d48c2555cd63ac00ef56b2a40c52
Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.092346 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.092399 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.096250 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.098196 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.598165194 +0000 UTC m=+142.461167277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.104580 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.156957 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-x4zf4" podStartSLOduration=123.156934816 podStartE2EDuration="2m3.156934816s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.155502967 +0000 UTC m=+142.018505050" watchObservedRunningTime="2025-12-08 21:21:00.156934816 +0000 UTC m=+142.019936899" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.178096 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" podStartSLOduration=123.178073912 podStartE2EDuration="2m3.178073912s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.174851185 +0000 UTC m=+142.037853258" watchObservedRunningTime="2025-12-08 21:21:00.178073912 +0000 UTC m=+142.041075995" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.199687 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.201376 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.701341941 +0000 UTC m=+142.564344024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.224890 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c" podStartSLOduration=123.224860966 podStartE2EDuration="2m3.224860966s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.212708926 +0000 UTC m=+142.075711009" watchObservedRunningTime="2025-12-08 21:21:00.224860966 +0000 UTC m=+142.087863049" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.302082 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.302444 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.802431075 +0000 UTC m=+142.665433158 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.332525 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-52g7k" podStartSLOduration=124.332503585 podStartE2EDuration="2m4.332503585s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.292903869 +0000 UTC m=+142.155905952" watchObservedRunningTime="2025-12-08 21:21:00.332503585 +0000 UTC m=+142.195505658" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.335119 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" podStartSLOduration=124.335094769 podStartE2EDuration="2m4.335094769s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.332949295 +0000 UTC m=+142.195951378" watchObservedRunningTime="2025-12-08 21:21:00.335094769 +0000 UTC m=+142.198096862" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.405639 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.406294 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:00.906270196 +0000 UTC m=+142.769272279 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.482716 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6qbtj" podStartSLOduration=5.482692382 podStartE2EDuration="5.482692382s" podCreationTimestamp="2025-12-08 21:20:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:00.481575619 +0000 UTC m=+142.344577702" watchObservedRunningTime="2025-12-08 21:21:00.482692382 +0000 UTC m=+142.345694465" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.493261 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.509059 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.509406 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.009392462 +0000 UTC m=+142.872394545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.611660 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.612157 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.11213353 +0000 UTC m=+142.975135613 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.689333 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.714006 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.714460 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.214444699 +0000 UTC m=+143.077446772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.816257 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.816661 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.316637795 +0000 UTC m=+143.179639878 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:00 crc kubenswrapper[4912]: I1208 21:21:00.925672 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:00 crc kubenswrapper[4912]: E1208 21:21:00.926408 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.426393938 +0000 UTC m=+143.289396021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.027160 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.027652 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.527629495 +0000 UTC m=+143.390631578 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.033479 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.133174 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.134754 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.634729653 +0000 UTC m=+143.497731946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.143157 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" event={"ID":"81b4fd7c-89e0-4795-b372-2d859131320c","Type":"ContainerStarted","Data":"15f11ce48ebaf4c2b16ad1039a473a6bbd835c79a7371eb2b1b59343f3436c05"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.143237 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" event={"ID":"81b4fd7c-89e0-4795-b372-2d859131320c","Type":"ContainerStarted","Data":"1fe4cc9c7fa923eb4de00cf9862166220d10ac4148c05a3cd3ae5a850e182d4d"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.177465 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" event={"ID":"225dcce8-cf38-4809-b2de-41f62273bf8d","Type":"ContainerStarted","Data":"8637b07f735a0aa39393f2b5d8495f8f4ec922e1513add3cfda7a637f06e5a8c"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.177522 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" event={"ID":"225dcce8-cf38-4809-b2de-41f62273bf8d","Type":"ContainerStarted","Data":"31d5dbd2865d5ae208e2ab3988b4b3f257926f79f8c41bb971442e975016e12b"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.177979 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" Dec 08 21:21:01 crc kubenswrapper[4912]: 
I1208 21:21:01.181259 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" event={"ID":"01944eae-d386-4ca1-b6fd-bb27526cc5ff","Type":"ContainerStarted","Data":"eb77ae5f6bbf01e92df7c19d64ab00f64bb9699a66d6d095aa4bf5597417ce8a"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.184359 4912 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-fq6s8 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.184402 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" podUID="225dcce8-cf38-4809-b2de-41f62273bf8d" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.186563 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerStarted","Data":"71906c229188b9c1373923699b33a89a038f1bf2da6626c12ba743ea673cba6a"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.186645 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerStarted","Data":"fcbbd3eb0440f3b9506a0de3c9a83f9a75740de8a170432f83d17b009fa0323b"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.187776 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.202314 4912 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mw698 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.202390 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.203499 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8" podStartSLOduration=124.20347457 podStartE2EDuration="2m4.20347457s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.202913728 +0000 UTC m=+143.065915811" watchObservedRunningTime="2025-12-08 21:21:01.20347457 +0000 UTC m=+143.066476653" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.213201 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" 
event={"ID":"d98e9979-80de-4f7d-a420-989bc116ab5c","Type":"ContainerStarted","Data":"b82900e7a2e3ab46f1040f5fef0a7765a5bf8067763cdb4c63e8475fb99b06b1"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.235906 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.236450 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.736415929 +0000 UTC m=+143.599418012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.237095 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.239373 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.739349179 +0000 UTC m=+143.602351262 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.239909 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" event={"ID":"428a33f4-5469-49d6-a012-49c7656807a7","Type":"ContainerStarted","Data":"8ac720140e6502ed4ab7427863ad689a0447effda59e2e920387fbb7adfa7b9b"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.242944 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b77mr" event={"ID":"0fb52c9e-bb73-483a-a064-b1bf85f5e901","Type":"ContainerStarted","Data":"f5660b688887cd69f68f1f4fe8b6b31715131cc9d7a58556077d746f35b624aa"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.263745 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" event={"ID":"c189484f-e280-4468-b815-f56ef70e07eb","Type":"ContainerStarted","Data":"a54679137f1f9bfb40df769e65ef3069fee7153a4abc00f2fcfd187d1e8646ca"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.263808 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" event={"ID":"c189484f-e280-4468-b815-f56ef70e07eb","Type":"ContainerStarted","Data":"73623b3849f7010871ae5f93b09f0b00397e897e6b7dddefe8436859142fdda5"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.280073 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" event={"ID":"48c80085-ebde-44ec-a981-c024ce9d008d","Type":"ContainerStarted","Data":"4fff142db9425f23b186667e5c613aaabefd6d1f3d5835a3ec91a84f9425d4c7"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.287251 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" event={"ID":"3104e42f-18cf-41de-b704-8b63a4ae2a44","Type":"ContainerStarted","Data":"af39e628247ce2deb429346d398b3b67a9205317e4cf80ceabbd5f7a7c3e5ffe"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.298142 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" event={"ID":"6ba5e948-9e8e-432b-9973-e0248bc2b82e","Type":"ContainerStarted","Data":"7d2b8dc86e57b808ac94140913fe8687a02c892777a7703d678169d4cdaf5090"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.304621 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" event={"ID":"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6","Type":"ContainerStarted","Data":"b28ecde27ad076f70b3bf434289ada14d96fb6c7e12eda5ffd8a452dee5a7af4"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.310138 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" 
event={"ID":"5ecd7845-9751-4783-926a-7b6f6344a767","Type":"ContainerStarted","Data":"79381cac4b350cc18ca65bdf27bf2fbb677e3aaee72b0667578b218841754e57"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.311970 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" event={"ID":"2573e7ee-4cff-4144-97d8-b19c2c5d1f42","Type":"ContainerStarted","Data":"0a144ba06f52958f75b4963c1a4902135902cd7b698c8c04d7d579f11fbf6304"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.324947 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" podStartSLOduration=124.324918543 podStartE2EDuration="2m4.324918543s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.296277313 +0000 UTC m=+143.159279396" watchObservedRunningTime="2025-12-08 21:21:01.324918543 +0000 UTC m=+143.187920626" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.338737 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.339151 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.839132316 +0000 UTC m=+143.702134399 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.340695 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" event={"ID":"6a2dcdec-9c49-47dd-baa2-f8a0a5ba8cae","Type":"ContainerStarted","Data":"bfcca7270aca41401212d6ce00f30fc8be82608ab24fca1a4b773ef90bd98e62"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.364451 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tt47m" podStartSLOduration=124.364423848 podStartE2EDuration="2m4.364423848s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.327427175 +0000 UTC m=+143.190429268" watchObservedRunningTime="2025-12-08 21:21:01.364423848 +0000 UTC m=+143.227425931" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.364892 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lltjv" podStartSLOduration=124.364887757 podStartE2EDuration="2m4.364887757s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.362481888 +0000 UTC m=+143.225483981" watchObservedRunningTime="2025-12-08 21:21:01.364887757 +0000 UTC m=+143.227889840" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.377515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" event={"ID":"81767e03-8fa4-4b32-822b-829600cb8114","Type":"ContainerStarted","Data":"6a757c0fb635396f8f72ec8ec3c38c9c5737d31ea4955c2c0e01ea6af5d69e3b"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.377811 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" event={"ID":"81767e03-8fa4-4b32-822b-829600cb8114","Type":"ContainerStarted","Data":"554874a5d095407e3f21caff6e0fb4260c74e6f64e2843dce08ad185f0baa0fd"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.387522 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" event={"ID":"f0ff0e40-991b-434a-bb8b-ae2bb3f6b559","Type":"ContainerStarted","Data":"7a423d4b5affd1cf7c1bcbb3a9166ab229d13cd95caa65f8a73ea5718cdf2bcd"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.407497 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.417063 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" 
event={"ID":"8a397f96-299f-4205-abb9-a261f73b1305","Type":"ContainerStarted","Data":"4eca51242a4ead50bc0e67fcfbb9ae4cd545a6d82766c3ce7e03acb6e3826561"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.424364 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" event={"ID":"2d947845-a2f2-4d22-be7f-da51b6edac44","Type":"ContainerStarted","Data":"28db853261cebccca6cbe1d9ed7244a433c3d5ecd496fcf6ae902f2a478b12b0"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.425273 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.425439 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:01 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:01 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:01 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.425475 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.427214 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" event={"ID":"36c61c78-9258-4664-89ed-f1f82a6f9e06","Type":"ContainerStarted","Data":"9c91eca41926e015ad16343e20f57ca3b461dcf5b8aede649898c5bba2955c97"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.427993 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.435886 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mt2rf" event={"ID":"98f85866-57fd-4792-889d-1d16aea9d19e","Type":"ContainerStarted","Data":"2c5014b2fdba02b402bbf8678a5e333337047e01d54a9b397e821ec735f447dc"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.442384 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bfkpt" event={"ID":"49c88b45-1149-4b8a-b390-d817da5ae936","Type":"ContainerStarted","Data":"f17aad2110f63e803e1783a560d2e72bd6d591a141e45b269005f833bcb1baa8"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.454980 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.456712 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" event={"ID":"1259f14f-f93a-4765-9f93-ac4af158951e","Type":"ContainerStarted","Data":"2ad250d2f2c63eb441a1c978152f4a9a62ebb307b39ac3fdd52cca03e61bb421"} Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 
21:21:01.457343 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:01.957320183 +0000 UTC m=+143.820322266 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.457906 4912 patch_prober.go:28] interesting pod/console-operator-58897d9998-2g5dn container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.457955 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" podUID="36c61c78-9258-4664-89ed-f1f82a6f9e06" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.466737 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" event={"ID":"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d","Type":"ContainerStarted","Data":"9f06bfa7200fccf7999b8a793fb9951490bbe30ac60dc2901674e9b0ba38bee5"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.472909 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" event={"ID":"e4c5991e-05aa-4110-92df-be3bd07d0a32","Type":"ContainerStarted","Data":"d4167f2d226516f32b8cd65c925b1d37e6e9d48c2555cd63ac00ef56b2a40c52"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.480816 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-b77mr" podStartSLOduration=124.480791327 podStartE2EDuration="2m4.480791327s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.420646277 +0000 UTC m=+143.283648350" watchObservedRunningTime="2025-12-08 21:21:01.480791327 +0000 UTC m=+143.343793400" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.509813 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" event={"ID":"60cbb4f5-7131-4242-8a30-e9706990636f","Type":"ContainerStarted","Data":"6ba960f66e59e522418c073a8be4f795b272886b6e9dca4e5528f4585f3aa757"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.515267 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bx7nc" podStartSLOduration=124.515246397 podStartE2EDuration="2m4.515246397s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-08 21:21:01.51442809 +0000 UTC m=+143.377430173" watchObservedRunningTime="2025-12-08 21:21:01.515246397 +0000 UTC m=+143.378248480" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.515379 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-96qt5" podStartSLOduration=124.5153734 podStartE2EDuration="2m4.5153734s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.482483782 +0000 UTC m=+143.345485865" watchObservedRunningTime="2025-12-08 21:21:01.5153734 +0000 UTC m=+143.378375483" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.518954 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" event={"ID":"a61272d2-307f-4dcb-b98a-16b015c7c8bd","Type":"ContainerStarted","Data":"add4587e092774e5268dc213af14ace3ffb37e885047204dd1c73e6c75fa186d"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.535164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" event={"ID":"b20ccfdc-c499-40e4-9a0b-bebc13394494","Type":"ContainerStarted","Data":"05bd3e1fbd09014ffa75b28732ceb3c631a078d9d50e7311e56ad017eae7930c"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.543532 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" event={"ID":"eb28504c-22a1-4b85-87f4-3b5c14cc5785","Type":"ContainerStarted","Data":"01877e6c9d4ef262149392110f17d62352dcd010fc399b02e2c0f6e8ea2c0ae1"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.543569 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" event={"ID":"eb28504c-22a1-4b85-87f4-3b5c14cc5785","Type":"ContainerStarted","Data":"dafe5c834aa2a0d798669f8a813bf0e2a8215dccfb1282cdb50fa2af8c4ef5d5"} Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.549450 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.549505 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.558835 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.559387 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:21:02.059370207 +0000 UTC m=+143.922372290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.559720 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.560172 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.060163343 +0000 UTC m=+143.923165426 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.614107 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-msd8v" podStartSLOduration=125.614082454 podStartE2EDuration="2m5.614082454s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.590511578 +0000 UTC m=+143.453513661" watchObservedRunningTime="2025-12-08 21:21:01.614082454 +0000 UTC m=+143.477084537" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.673835 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.689486 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.690448 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.190411168 +0000 UTC m=+144.053413251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.694184 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.756028 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hz6tw" podStartSLOduration=124.75599983 podStartE2EDuration="2m4.75599983s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.697616036 +0000 UTC m=+143.560618119" watchObservedRunningTime="2025-12-08 21:21:01.75599983 +0000 UTC m=+143.619001913" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.779863 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.780342 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.280327502 +0000 UTC m=+144.143329585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.863461 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.880736 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.881183 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.38116767 +0000 UTC m=+144.244169753 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.946859 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" podStartSLOduration=124.946836014 podStartE2EDuration="2m4.946836014s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.946762683 +0000 UTC m=+143.809764766" watchObservedRunningTime="2025-12-08 21:21:01.946836014 +0000 UTC m=+143.809838087" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.948407 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n" podStartSLOduration=124.948398596 podStartE2EDuration="2m4.948398596s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:01.851918678 +0000 UTC m=+143.714920751" watchObservedRunningTime="2025-12-08 21:21:01.948398596 +0000 UTC m=+143.811400679" Dec 08 21:21:01 crc kubenswrapper[4912]: I1208 21:21:01.985332 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:01 crc kubenswrapper[4912]: E1208 21:21:01.985800 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.485785057 +0000 UTC m=+144.348787140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.057216 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" podStartSLOduration=125.057197819 podStartE2EDuration="2m5.057197819s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.056474104 +0000 UTC m=+143.919476217" watchObservedRunningTime="2025-12-08 21:21:02.057197819 +0000 UTC m=+143.920199902" Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.088674 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.089236 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.589218329 +0000 UTC m=+144.452220412 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.103172 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9" podStartSLOduration=125.103148317 podStartE2EDuration="2m5.103148317s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.102910522 +0000 UTC m=+143.965912605" watchObservedRunningTime="2025-12-08 21:21:02.103148317 +0000 UTC m=+143.966150400" Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.134383 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zwxn4" podStartSLOduration=125.13435306 podStartE2EDuration="2m5.13435306s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.132516132 +0000 UTC m=+143.995518215" watchObservedRunningTime="2025-12-08 21:21:02.13435306 +0000 UTC m=+143.997355143" Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.193060 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.193442 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.693426438 +0000 UTC m=+144.556428521 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.244223 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hfs55" podStartSLOduration=125.244202345 podStartE2EDuration="2m5.244202345s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.174164571 +0000 UTC m=+144.037166654" watchObservedRunningTime="2025-12-08 21:21:02.244202345 +0000 UTC m=+144.107204428"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.300931 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.301361 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.801345693 +0000 UTC m=+144.664347766 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.331278 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nq68n"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.409925 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.410311 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:02.910296659 +0000 UTC m=+144.773298742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.428415 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 08 21:21:02 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld
Dec 08 21:21:02 crc kubenswrapper[4912]: [+]process-running ok
Dec 08 21:21:02 crc kubenswrapper[4912]: healthz check failed
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.428851 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.513441 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.514013 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.013991056 +0000 UTC m=+144.876993129 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.612420 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" event={"ID":"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6","Type":"ContainerStarted","Data":"4c1b3ec78dbf236132996456d14312e8954d5e963d58318ab83f2349b48788d4"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.615162 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.615738 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.115724444 +0000 UTC m=+144.978726527 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.635396 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bfkpt" event={"ID":"49c88b45-1149-4b8a-b390-d817da5ae936","Type":"ContainerStarted","Data":"8b613c386b74291b28d76cdfb1effb70d2699f0bfb85ee3bfdbda6349cc1e058"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.636584 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-bfkpt"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.672615 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" event={"ID":"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d","Type":"ContainerStarted","Data":"c7dbc629ab46a83c042efd996ea7c4433b6b96954334bf17e47f33722899cf71"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.685958 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" event={"ID":"e4c5991e-05aa-4110-92df-be3bd07d0a32","Type":"ContainerStarted","Data":"8eef1f427ff3f9116461bfe4560e59ce046b436d30df1c9413163fdbf50f1ef1"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.704210 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" event={"ID":"1b7b4ef5-c90c-459b-8285-d5163a0e9b78","Type":"ContainerStarted","Data":"1313117c62402e507dc9cc3a2485f0c3c96f3bdbfdee243c3eb1dba773e23d69"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.722751 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" event={"ID":"3104e42f-18cf-41de-b704-8b63a4ae2a44","Type":"ContainerStarted","Data":"58bce9bc8a41a13e2752ee55c6d461e4a0e2847d87ce4fde24416955b37fac1d"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.723206 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.724431 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.224404724 +0000 UTC m=+145.087406807 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.740535 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qxcg4" podStartSLOduration=125.740505026 podStartE2EDuration="2m5.740505026s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.740265331 +0000 UTC m=+144.603267414" watchObservedRunningTime="2025-12-08 21:21:02.740505026 +0000 UTC m=+144.603507109"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.740777 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-bfkpt" podStartSLOduration=7.740772062 podStartE2EDuration="7.740772062s" podCreationTimestamp="2025-12-08 21:20:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.678291383 +0000 UTC m=+144.541293466" watchObservedRunningTime="2025-12-08 21:21:02.740772062 +0000 UTC m=+144.603774145"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.740797 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" event={"ID":"48c80085-ebde-44ec-a981-c024ce9d008d","Type":"ContainerStarted","Data":"a5f10d8aacc037a1c84fe7ba521e96e6bc922c4e73d3c9c3f1b8050359492e7c"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.761844 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pmz5c"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.779845 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" event={"ID":"01944eae-d386-4ca1-b6fd-bb27526cc5ff","Type":"ContainerStarted","Data":"5cf0eb3313e87d24972143f6fa2b3627e7fc3c0e8b1436830ecf55bad4070ec0"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.793057 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wjcc5" podStartSLOduration=125.793023499 podStartE2EDuration="2m5.793023499s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.791973877 +0000 UTC m=+144.654975960" watchObservedRunningTime="2025-12-08 21:21:02.793023499 +0000 UTC m=+144.656025582"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.818874 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" event={"ID":"428a33f4-5469-49d6-a012-49c7656807a7","Type":"ContainerStarted","Data":"0ea0b1de5b4668720e18408730ab2dbd37e6c94ea2c1644823b7cb52e73ebde3"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.825995 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.828282 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.328267335 +0000 UTC m=+145.191269418 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.868433 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" event={"ID":"60cbb4f5-7131-4242-8a30-e9706990636f","Type":"ContainerStarted","Data":"f3f002232c6984a24622a735d007bcb1a8f745a3fc30eb9d6d4a754525974c13"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.895012 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" event={"ID":"2573e7ee-4cff-4144-97d8-b19c2c5d1f42","Type":"ContainerStarted","Data":"4d4edc7558ebb60787559eb774a3a5ac633e9935f5a5a055c2e7d166d254723a"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.895115 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" event={"ID":"2573e7ee-4cff-4144-97d8-b19c2c5d1f42","Type":"ContainerStarted","Data":"ebbbba6e839717085ae2e537d3fffe337ca401aad7c3ad6a84330fa6dd26a062"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.897605 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" event={"ID":"5ecd7845-9751-4783-926a-7b6f6344a767","Type":"ContainerStarted","Data":"949e2872519a324e007ffb2ddf65c79b85ea7472855a9c06911d982b90fae161"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.897641 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" event={"ID":"5ecd7845-9751-4783-926a-7b6f6344a767","Type":"ContainerStarted","Data":"5baef77100550e451f5e0760f5434e0037550773ed660fa1e3cd71011d3935b1"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.898175 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.912589 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" event={"ID":"36c61c78-9258-4664-89ed-f1f82a6f9e06","Type":"ContainerStarted","Data":"9dfca499d20768e6c817117a8a87d720e198428c727a2a2c09dfc26914c8c39d"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.926814 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:02 crc kubenswrapper[4912]: E1208 21:21:02.928933 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.42891775 +0000 UTC m=+145.291919833 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.958901 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" event={"ID":"8a397f96-299f-4205-abb9-a261f73b1305","Type":"ContainerStarted","Data":"347e3c3e93870e2c4855b8078cf153dfcc8bef1777d0a0bb7e89c338b533de29"}
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.966806 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.966872 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 21:21:02 crc kubenswrapper[4912]: I1208 21:21:02.991517 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-tmgx2" podStartSLOduration=125.99149602 podStartE2EDuration="2m5.99149602s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.988826615 +0000 UTC m=+144.851828698" watchObservedRunningTime="2025-12-08 21:21:02.99149602 +0000 UTC m=+144.854498103"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:02.999540 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dtf8c" podStartSLOduration=125.999508345 podStartE2EDuration="2m5.999508345s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:02.928708876 +0000 UTC m=+144.791710959" watchObservedRunningTime="2025-12-08 21:21:02.999508345 +0000 UTC m=+144.862510428"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.027570 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" event={"ID":"81b4fd7c-89e0-4795-b372-2d859131320c","Type":"ContainerStarted","Data":"8a8189fbeece401e800220e0429f3b7e831ce0d9ae4808934d14cdd127a965bd"}
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.035011 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.040437 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.540408249 +0000 UTC m=+145.403410332 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.056846 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mt2rf" event={"ID":"98f85866-57fd-4792-889d-1d16aea9d19e","Type":"ContainerStarted","Data":"8cd856dc8425fc83886ef174c6ab9a2a55dfd4e4f14a8ec0a3e15674a7469dc9"}
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.059729 4912 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mw698 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.059844 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.066082 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dgql6" podStartSLOduration=126.066061297 podStartE2EDuration="2m6.066061297s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.065332422 +0000 UTC m=+144.928334515" watchObservedRunningTime="2025-12-08 21:21:03.066061297 +0000 UTC m=+144.929063400"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.092228 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fq6s8"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.146010 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-765c9"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.146989 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.158090 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.657936342 +0000 UTC m=+145.520938425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.209900 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-gdkk9" podStartSLOduration=126.209882852 podStartE2EDuration="2m6.209882852s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.209249269 +0000 UTC m=+145.072251342" watchObservedRunningTime="2025-12-08 21:21:03.209882852 +0000 UTC m=+145.072884935"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.211861 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" podStartSLOduration=126.211853463 podStartE2EDuration="2m6.211853463s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.146525896 +0000 UTC m=+145.009527989" watchObservedRunningTime="2025-12-08 21:21:03.211853463 +0000 UTC m=+145.074855546"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.254938 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.255410 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.755397721 +0000 UTC m=+145.618399794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.356922 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.357209 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.857172109 +0000 UTC m=+145.720174192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.418893 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 08 21:21:03 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld
Dec 08 21:21:03 crc kubenswrapper[4912]: [+]process-running ok
Dec 08 21:21:03 crc kubenswrapper[4912]: healthz check failed
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.418979 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.451236 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-mt2rf" podStartSLOduration=8.451215127 podStartE2EDuration="8.451215127s" podCreationTimestamp="2025-12-08 21:20:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.45038783 +0000 UTC m=+145.313389913" watchObservedRunningTime="2025-12-08 21:21:03.451215127 +0000 UTC m=+145.314217210"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.464149 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.464548 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:03.964532242 +0000 UTC m=+145.827534325 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.565778 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.566152 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.066129046 +0000 UTC m=+145.929131139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.652656 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kxgdg" podStartSLOduration=126.65263286 podStartE2EDuration="2m6.65263286s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.648984025 +0000 UTC m=+145.511986108" watchObservedRunningTime="2025-12-08 21:21:03.65263286 +0000 UTC m=+145.515634943"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.667619 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.668158 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.168132309 +0000 UTC m=+146.031134632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.769257 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.769497 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.269457928 +0000 UTC m=+146.132460011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.769723 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.770136 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.270127762 +0000 UTC m=+146.133129845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.811422 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-x7c67" podStartSLOduration=126.811401123 podStartE2EDuration="2m6.811401123s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:03.755824667 +0000 UTC m=+145.618826750" watchObservedRunningTime="2025-12-08 21:21:03.811401123 +0000 UTC m=+145.674403206"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.871223 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.871540 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.371469511 +0000 UTC m=+146.234471624 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.871749 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.872180 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.372164705 +0000 UTC m=+146.235166788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.879948 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2x2xd"]
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.881093 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.973968 4912 patch_prober.go:28] interesting pod/console-operator-58897d9998-2g5dn container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.982870 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2g5dn" podUID="36c61c78-9258-4664-89ed-f1f82a6f9e06" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.984370 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.984669 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.984736 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nntdh\" (UniqueName: \"kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:03 crc kubenswrapper[4912]: I1208 21:21:03.984775 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:03 crc kubenswrapper[4912]: E1208 21:21:03.984944 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.48492364 +0000 UTC m=+146.347925723 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.051752 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.054501 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.068979 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.075955 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2x2xd"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.076184 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.086950 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm64g\" (UniqueName: \"kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087023 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087128 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087178 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nntdh\" (UniqueName: \"kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087212 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087261 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.087290 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.087704 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.587686369 +0000 UTC m=+146.450688452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.088990 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.097542 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.117643 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hgl2c"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.118587 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.118689 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.140340 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" event={"ID":"450b96f9-f5a6-485f-a3fd-ae5a408b2b8d","Type":"ContainerStarted","Data":"d162fe838a0338d9ec7833113eece1b7336f6ec2499c0fda1c72aadb71379545"}
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.145096 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hgl2c"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.164512 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nntdh\" (UniqueName: \"kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh\") pod \"community-operators-2x2xd\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.173740 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" event={"ID":"1b7b4ef5-c90c-459b-8285-d5163a0e9b78","Type":"ContainerStarted","Data":"d9e3b6eee20fd208282e63416dacf63d49d0b78aa7810dfda9f27d9453725376"}
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.191715 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192059 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192127 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192257 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192313 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm64g\" (UniqueName: \"kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192371 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lsds\" (UniqueName: \"kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.192397 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.210489 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2x2xd"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.213637 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.214313 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.714260648 +0000 UTC m=+146.577262731 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.214853 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.215403 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" event={"ID":"3104e42f-18cf-41de-b704-8b63a4ae2a44","Type":"ContainerStarted","Data":"e46665fbe5ff879215141c884c797a3e8a99c263c1102f67fdbd82e73799b36f"}
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.274551 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2g5dn"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.278565 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.294591 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm64g\" (UniqueName: \"kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g\") pod \"certified-operators-2k2fc\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.294893 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lsds\" (UniqueName: \"kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.295866 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.296215 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.296727 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.297949 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.304261 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.804238933 +0000 UTC m=+146.667241016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.313899 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.322677 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"]
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.322836 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.324006 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nfncz" podStartSLOduration=127.323994399 podStartE2EDuration="2m7.323994399s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:04.291938149 +0000 UTC m=+146.154940242" watchObservedRunningTime="2025-12-08 21:21:04.323994399 +0000 UTC m=+146.186996482"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.347188 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lsds\" (UniqueName: \"kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds\") pod \"community-operators-hgl2c\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.389406 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" podStartSLOduration=128.389381857 podStartE2EDuration="2m8.389381857s" podCreationTimestamp="2025-12-08 21:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:04.388397817 +0000 UTC m=+146.251399900" watchObservedRunningTime="2025-12-08 21:21:04.389381857 +0000 UTC m=+146.252383940"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.390989 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jn7k8" podStartSLOduration=127.39097971 podStartE2EDuration="2m7.39097971s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:04.35751085 +0000 UTC m=+146.220512933" watchObservedRunningTime="2025-12-08 21:21:04.39097971 +0000 UTC m=+146.253981793"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.401561 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.401779 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.401837 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.401902 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfc8t\" (UniqueName: \"kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.401912 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2k2fc"
Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.402016 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:04.901997507 +0000 UTC m=+146.764999590 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.427568 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 08 21:21:04 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld
Dec 08 21:21:04 crc kubenswrapper[4912]: [+]process-running ok
Dec 08 21:21:04 crc kubenswrapper[4912]: healthz check failed
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.427632 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.443263 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hgl2c"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.505719 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.506143 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfc8t\" (UniqueName: \"kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.506184 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.506228 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.506442 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.506546 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl"
Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.506799 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.006783467 +0000 UTC m=+146.869785550 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.540183 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfc8t\" (UniqueName: \"kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t\") pod \"certified-operators-pk9xl\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.608974 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.609569 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.109542806 +0000 UTC m=+146.972544889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.711126 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.712131 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.21211866 +0000 UTC m=+147.075120743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.744160 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.818029 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.818479 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.318463733 +0000 UTC m=+147.181465816 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.928240 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:04 crc kubenswrapper[4912]: E1208 21:21:04.928648 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.428634304 +0000 UTC m=+147.291636387 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:04 crc kubenswrapper[4912]: I1208 21:21:04.975947 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2x2xd"] Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.029755 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.029914 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-08 21:21:05.529890602 +0000 UTC m=+147.392892695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.030238 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.030597 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.530583926 +0000 UTC m=+147.393586019 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.131899 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.132080 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.632044657 +0000 UTC m=+147.495046740 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.132354 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.132686 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.63267324 +0000 UTC m=+147.495675323 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.233646 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.234241 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.734215824 +0000 UTC m=+147.597217907 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.262021 4912 generic.go:334] "Generic (PLEG): container finished" podID="1259f14f-f93a-4765-9f93-ac4af158951e" containerID="2ad250d2f2c63eb441a1c978152f4a9a62ebb307b39ac3fdd52cca03e61bb421" exitCode=0 Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.262326 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" event={"ID":"1259f14f-f93a-4765-9f93-ac4af158951e","Type":"ContainerDied","Data":"2ad250d2f2c63eb441a1c978152f4a9a62ebb307b39ac3fdd52cca03e61bb421"} Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.264010 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerStarted","Data":"08ab12aad0099216238e1c7e672aa433a3e704f4a8dde1d4f4e9cfab21cf511c"} Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.284293 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" event={"ID":"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6","Type":"ContainerStarted","Data":"1c65f433b10660c72d990af7bbd68892d88c9f9cd2e909a2fb05031937102d61"} Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.360108 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.366195 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.866179684 +0000 UTC m=+147.729181767 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.441367 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:05 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:05 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:05 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.441434 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476388 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.476491 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.976474668 +0000 UTC m=+147.839476751 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476750 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476782 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476862 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476887 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.476920 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.477226 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:05.977219443 +0000 UTC m=+147.840221526 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.488864 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.500805 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.509237 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.511957 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.536181 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.536970 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.539112 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.542365 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.547856 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.549172 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.559838 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.578670 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.578961 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.579019 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.579166 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.079151385 +0000 UTC m=+147.942153468 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.587409 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.680093 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.680172 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.680211 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.680299 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.680872 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.180860701 +0000 UTC m=+148.043862784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.783862 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.785051 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.285017529 +0000 UTC m=+148.148019612 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.896556 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:05 crc kubenswrapper[4912]: E1208 21:21:05.896991 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.396975557 +0000 UTC m=+148.259977640 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.932886 4912 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.945453 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.993405 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:21:05 crc kubenswrapper[4912]: I1208 21:21:05.994790 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:05.998329 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:05.998740 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.498717904 +0000 UTC m=+148.361719987 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:05.998802 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5rbg\" (UniqueName: \"kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:05.998850 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:05.998871 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:05.998897 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:05.999267 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.499261035 +0000 UTC m=+148.362263118 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.108450 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.111160 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.111297 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.111547 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5rbg\" (UniqueName: \"kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.115272 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.116723 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:06.116868 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.61684675 +0000 UTC m=+148.479848833 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.117362 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.145110 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5rbg\" (UniqueName: \"kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg\") pod \"redhat-marketplace-8p8tl\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.160697 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.180492 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.181923 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.186196 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.214379 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:06.214783 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.714767588 +0000 UTC m=+148.577769671 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.216629 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.257142 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hgl2c"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.258542 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.321458 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.321799 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kmcq\" (UniqueName: \"kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.321855 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.321888 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:06.322044 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.822015269 +0000 UTC m=+148.685017342 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.323513 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.406122 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.407376 4912 generic.go:334] "Generic (PLEG): container finished" podID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerID="7c2048822762cebadc26ba497ad3098ba8a102490a7e9b119c0d5912857be239" exitCode=0 Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.407972 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerDied","Data":"7c2048822762cebadc26ba497ad3098ba8a102490a7e9b119c0d5912857be239"} Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.414842 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.417986 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:06 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:06 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:06 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.418214 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.418962 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerStarted","Data":"e299148a99ae9434f3cac07d64987cd7ac866147fa1eb91a4c78128660fd2475"} Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.422231 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" event={"ID":"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6","Type":"ContainerStarted","Data":"fae497073c039fb9daae129754b0853d53daae83e0a90920f91b79117a68900b"} Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.446333 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kmcq\" (UniqueName: \"kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.446397 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.446423 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.446458 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.446951 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:06.447077 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 21:21:06.947056407 +0000 UTC m=+148.810058490 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-45v4h" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.447219 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.499056 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kmcq\" (UniqueName: \"kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq\") pod \"redhat-marketplace-w779q\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.569532 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:06 crc kubenswrapper[4912]: E1208 21:21:06.570910 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 21:21:07.07089371 +0000 UTC m=+148.933895793 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.605128 4912 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-08T21:21:05.932920148Z","Handler":null,"Name":""} Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.637777 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.653326 4912 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.653371 4912 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.718358 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.732723 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.732901 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.733141 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.733223 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.761261 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.761689 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.887196 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-45v4h\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.924691 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.949103 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.976835 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.977982 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:06 crc kubenswrapper[4912]: I1208 21:21:06.987214 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:06.995909 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.294554 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xd4t\" (UniqueName: \"kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.295028 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.295086 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.397483 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.397533 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.397582 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xd4t\" (UniqueName: \"kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.398253 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.401424 4912 patch_prober.go:28] interesting pod/console-f9d7485db-46knq container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.401489 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-46knq" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.404399 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-46knq" Dec 08 
21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.426983 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.427933 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.431023 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:07 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:07 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:07 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.431112 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.440415 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.446155 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.446558 4912 patch_prober.go:28] interesting pod/apiserver-76f77b778f-s2plb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]log ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]etcd ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/generic-apiserver-start-informers ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/max-in-flight-filter ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 08 21:21:07 crc kubenswrapper[4912]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 08 21:21:07 crc kubenswrapper[4912]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/project.openshift.io-projectcache ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/openshift.io-startinformers ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 08 21:21:07 crc kubenswrapper[4912]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 08 21:21:07 crc kubenswrapper[4912]: livez check failed Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.446631 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" podUID="1b7b4ef5-c90c-459b-8285-d5163a0e9b78" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" 
Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.461859 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.486743 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.505377 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerStarted","Data":"95e47dd73e3fd7cda2c323c078a358b68b13d226728e40a132cbc20e283dbd53"} Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.521767 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerStarted","Data":"7c81e1d96938580f89dff423fa095e1e73449bdd6929fcb15a710474eba1905c"} Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.530480 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.533163 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" event={"ID":"2330bc56-ffb4-4f12-8fd3-0ff9d95307c6","Type":"ContainerStarted","Data":"356d7806ba8aa6c902815c05390f4e18937aca9917535ff4e486894c4755dfa9"} Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.536232 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.552762 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xd4t\" (UniqueName: \"kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t\") pod \"redhat-operators-7nhqb\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.588474 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7lvnt" podStartSLOduration=12.588449777 podStartE2EDuration="12.588449777s" podCreationTimestamp="2025-12-08 21:20:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:07.582154937 +0000 UTC m=+149.445157030" watchObservedRunningTime="2025-12-08 21:21:07.588449777 +0000 UTC m=+149.451451860" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.619466 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.620333 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv9hg\" (UniqueName: \"kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.620395 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.620531 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.733365 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv9hg\" (UniqueName: \"kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.733411 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.733457 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.734668 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.734937 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content\") pod \"redhat-operators-tc2h7\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:07 crc kubenswrapper[4912]: I1208 21:21:07.788368 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv9hg\" (UniqueName: \"kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg\") pod \"redhat-operators-tc2h7\" (UID: 
\"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.094129 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.134679 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:21:08 crc kubenswrapper[4912]: W1208 21:21:08.136192 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-fe1bf417888658a6a51b24a9c85e6e15c061a957c534b882a79b332d7af458b2 WatchSource:0}: Error finding container fe1bf417888658a6a51b24a9c85e6e15c061a957c534b882a79b332d7af458b2: Status 404 returned error can't find the container with id fe1bf417888658a6a51b24a9c85e6e15c061a957c534b882a79b332d7af458b2 Dec 08 21:21:08 crc kubenswrapper[4912]: W1208 21:21:08.159182 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4cef5b95_055b_4069_b2bf_65ef8665c9f8.slice/crio-3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d WatchSource:0}: Error finding container 3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d: Status 404 returned error can't find the container with id 3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d Dec 08 21:21:08 crc kubenswrapper[4912]: W1208 21:21:08.165411 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-7a7e1d3b33ff6de9fea55476feb6a591a2b5fbc8b2a3b188bab806781d7bfab8 WatchSource:0}: Error finding container 7a7e1d3b33ff6de9fea55476feb6a591a2b5fbc8b2a3b188bab806781d7bfab8: Status 404 returned error can't find the container with id 7a7e1d3b33ff6de9fea55476feb6a591a2b5fbc8b2a3b188bab806781d7bfab8 Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.166289 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.204482 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.209097 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.209236 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:21:08 crc kubenswrapper[4912]: W1208 21:21:08.238647 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5facd66_e234_44bc_b3b5_36f9860d98d1.slice/crio-f012ea9994c5f156920eabbb4afc59021f1dda63a02ca079c23aa72610e9828c WatchSource:0}: Error finding container f012ea9994c5f156920eabbb4afc59021f1dda63a02ca079c23aa72610e9828c: Status 404 returned error can't find the container with id f012ea9994c5f156920eabbb4afc59021f1dda63a02ca079c23aa72610e9828c Dec 08 21:21:08 crc kubenswrapper[4912]: W1208 21:21:08.241021 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba9bc9a7_7ac4_43eb_a545_099564781a42.slice/crio-f0395209b9f27d006d9b867124389eba0248ec1bedcd4c3700a4832c376810f1 WatchSource:0}: Error finding container f0395209b9f27d006d9b867124389eba0248ec1bedcd4c3700a4832c376810f1: Status 404 returned error can't find the container with id f0395209b9f27d006d9b867124389eba0248ec1bedcd4c3700a4832c376810f1 Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.313876 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume\") pod \"1259f14f-f93a-4765-9f93-ac4af158951e\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.314013 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dhqm\" (UniqueName: \"kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm\") pod \"1259f14f-f93a-4765-9f93-ac4af158951e\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.314061 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume\") pod \"1259f14f-f93a-4765-9f93-ac4af158951e\" (UID: \"1259f14f-f93a-4765-9f93-ac4af158951e\") " Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.317405 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume" (OuterVolumeSpecName: "config-volume") pod "1259f14f-f93a-4765-9f93-ac4af158951e" (UID: "1259f14f-f93a-4765-9f93-ac4af158951e"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.380443 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.395148 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1259f14f-f93a-4765-9f93-ac4af158951e" (UID: "1259f14f-f93a-4765-9f93-ac4af158951e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.396994 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm" (OuterVolumeSpecName: "kube-api-access-7dhqm") pod "1259f14f-f93a-4765-9f93-ac4af158951e" (UID: "1259f14f-f93a-4765-9f93-ac4af158951e"). InnerVolumeSpecName "kube-api-access-7dhqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.413609 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:08 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:08 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:08 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.413685 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.414179 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.415585 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dhqm\" (UniqueName: \"kubernetes.io/projected/1259f14f-f93a-4765-9f93-ac4af158951e-kube-api-access-7dhqm\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.415605 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1259f14f-f93a-4765-9f93-ac4af158951e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.415615 4912 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1259f14f-f93a-4765-9f93-ac4af158951e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.611001 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.645620 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" 
event={"ID":"c5facd66-e234-44bc-b3b5-36f9860d98d1","Type":"ContainerStarted","Data":"f012ea9994c5f156920eabbb4afc59021f1dda63a02ca079c23aa72610e9828c"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.670300 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.670401 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerStarted","Data":"6ed10000db890c730abbfcc1f181616da89c12e38b573b52a9160884aa59fa58"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.697755 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fe1bf417888658a6a51b24a9c85e6e15c061a957c534b882a79b332d7af458b2"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.700133 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7a7e1d3b33ff6de9fea55476feb6a591a2b5fbc8b2a3b188bab806781d7bfab8"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.762751 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" event={"ID":"1259f14f-f93a-4765-9f93-ac4af158951e","Type":"ContainerDied","Data":"81b214e583fffc7bd8adc0851554024c0fdf76da36e3a00c328609c3845ede76"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.762804 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81b214e583fffc7bd8adc0851554024c0fdf76da36e3a00c328609c3845ede76" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.762884 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh" Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.773640 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"23e261b872e6d7489b98c7dace07725d22c53c05cff22ecb3770e2f16531190a"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.780515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerStarted","Data":"f0395209b9f27d006d9b867124389eba0248ec1bedcd4c3700a4832c376810f1"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.787886 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cef5b95-055b-4069-b2bf-65ef8665c9f8","Type":"ContainerStarted","Data":"3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.789596 4912 generic.go:334] "Generic (PLEG): container finished" podID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerID="49980ca768af985b963c50d3f61a3451f9f85da28dd8125ed3110e0dee961606" exitCode=0 Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.789664 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerDied","Data":"49980ca768af985b963c50d3f61a3451f9f85da28dd8125ed3110e0dee961606"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.793176 4912 generic.go:334] "Generic (PLEG): container finished" podID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerID="5a7b13b061d872cc9187646fb6c4c9340017322c598f627c2691519d2960fc98" exitCode=0 Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.793306 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerDied","Data":"5a7b13b061d872cc9187646fb6c4c9340017322c598f627c2691519d2960fc98"} Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.798145 4912 generic.go:334] "Generic (PLEG): container finished" podID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerID="50e5c979bb3d7905245a929fd342651abfde3873e6b2c7d909472e56b7073fdc" exitCode=0 Dec 08 21:21:08 crc kubenswrapper[4912]: I1208 21:21:08.799991 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerDied","Data":"50e5c979bb3d7905245a929fd342651abfde3873e6b2c7d909472e56b7073fdc"} Dec 08 21:21:09 crc kubenswrapper[4912]: I1208 21:21:09.010959 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:21:09 crc kubenswrapper[4912]: I1208 21:21:09.411762 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:09 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:09 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:09 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:09 crc 
kubenswrapper[4912]: I1208 21:21:09.411844 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:09 crc kubenswrapper[4912]: I1208 21:21:09.843734 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerStarted","Data":"7e9d7df537650aad9ca0e95ee30276a9e296345ef9187b4891f04be83c1e1e77"} Dec 08 21:21:09 crc kubenswrapper[4912]: I1208 21:21:09.850050 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerStarted","Data":"cb5f9e7fe9810fabf646fca6f77f1bb84436969e60a8fadeff22df32fffe0b14"} Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.409645 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.409707 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:10 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:10 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:10 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.409816 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:10 crc kubenswrapper[4912]: E1208 21:21:10.409903 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1259f14f-f93a-4765-9f93-ac4af158951e" containerName="collect-profiles" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.409917 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="1259f14f-f93a-4765-9f93-ac4af158951e" containerName="collect-profiles" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.410048 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="1259f14f-f93a-4765-9f93-ac4af158951e" containerName="collect-profiles" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.411286 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.414774 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.415067 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.415079 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.434724 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.434798 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.535900 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.535954 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.536054 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.564154 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.739128 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.889348 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cef5b95-055b-4069-b2bf-65ef8665c9f8","Type":"ContainerStarted","Data":"ccdd842f1f99d7fb90e396bf261c83b7f50b9c5b1d630101d7a72b67165d4e62"} Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.891642 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerStarted","Data":"64c5aeb572ddc8957e12d027ff43b17bba6bff13a253f200886087966c4fa124"} Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.894043 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"065eac04ca9dedb03c8d59e1d839abb9c72df46c4304ac2652381fb471d5e814"} Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.894138 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.904774 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1252e2d818936819a792643bbefc7817a9d82207aa09b72ea952f94792b94a1c"} Dec 08 21:21:10 crc kubenswrapper[4912]: I1208 21:21:10.909600 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerStarted","Data":"ee09fbe83e452028cbbd60308aed9aaebc123e7c807aca52e6620e413d85730a"} Dec 08 21:21:11 crc kubenswrapper[4912]: I1208 21:21:11.320059 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 21:21:11 crc kubenswrapper[4912]: I1208 21:21:11.411585 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:11 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:11 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:11 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:11 crc kubenswrapper[4912]: I1208 21:21:11.411686 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:11 crc kubenswrapper[4912]: W1208 21:21:11.439494 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podeaa43cc1_7b37_4711_a833_d83c5d66a6e5.slice/crio-8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9 WatchSource:0}: Error finding container 8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9: Status 404 returned error can't find the container with id 8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9 Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.157979 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" event={"ID":"c5facd66-e234-44bc-b3b5-36f9860d98d1","Type":"ContainerStarted","Data":"7e4a13381d543d0c13ebca42391d58776999d266f4cf020238d79ebf8696346e"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.158687 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.161544 4912 generic.go:334] "Generic (PLEG): container finished" podID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerID="ee09fbe83e452028cbbd60308aed9aaebc123e7c807aca52e6620e413d85730a" exitCode=0 Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.161644 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerDied","Data":"ee09fbe83e452028cbbd60308aed9aaebc123e7c807aca52e6620e413d85730a"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.162794 4912 generic.go:334] "Generic (PLEG): container finished" podID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerID="00f66cd362df975101e207944c3b17363c0581f337d86ddb4806c99409d9950c" exitCode=0 Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.162870 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerDied","Data":"00f66cd362df975101e207944c3b17363c0581f337d86ddb4806c99409d9950c"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.166943 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"eaa43cc1-7b37-4711-a833-d83c5d66a6e5","Type":"ContainerStarted","Data":"8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.168731 4912 generic.go:334] "Generic (PLEG): container finished" podID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerID="5babc46824a3cfa6aeade81b93695f2cb83850927c770ed32fd9cd8bd7cc30e7" exitCode=0 Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.168912 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerDied","Data":"5babc46824a3cfa6aeade81b93695f2cb83850927c770ed32fd9cd8bd7cc30e7"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.172983 4912 generic.go:334] "Generic (PLEG): container finished" podID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerID="64c5aeb572ddc8957e12d027ff43b17bba6bff13a253f200886087966c4fa124" exitCode=0 Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.173066 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerDied","Data":"64c5aeb572ddc8957e12d027ff43b17bba6bff13a253f200886087966c4fa124"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.187486 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"75eefab48bca1af53953d9aa588c5cd6628b9ff190c68ffc8974b9659b11349b"} Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.187745 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" podStartSLOduration=135.187719901 podStartE2EDuration="2m15.187719901s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:12.183460343 +0000 UTC m=+154.046462436" watchObservedRunningTime="2025-12-08 21:21:12.187719901 +0000 UTC m=+154.050721984" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.398133 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=7.398109848 podStartE2EDuration="7.398109848s" podCreationTimestamp="2025-12-08 21:21:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:12.394634447 +0000 UTC m=+154.257636540" watchObservedRunningTime="2025-12-08 21:21:12.398109848 +0000 UTC m=+154.261111931" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.422523 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.461404 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-s2plb" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.491280 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:12 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:12 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:12 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.491366 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:12 crc kubenswrapper[4912]: I1208 21:21:12.660947 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-bfkpt" Dec 08 21:21:13 crc kubenswrapper[4912]: I1208 21:21:13.197269 4912 generic.go:334] "Generic (PLEG): container finished" podID="4cef5b95-055b-4069-b2bf-65ef8665c9f8" containerID="ccdd842f1f99d7fb90e396bf261c83b7f50b9c5b1d630101d7a72b67165d4e62" exitCode=0 Dec 08 21:21:13 crc kubenswrapper[4912]: I1208 21:21:13.197515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cef5b95-055b-4069-b2bf-65ef8665c9f8","Type":"ContainerDied","Data":"ccdd842f1f99d7fb90e396bf261c83b7f50b9c5b1d630101d7a72b67165d4e62"} Dec 08 21:21:13 crc kubenswrapper[4912]: I1208 21:21:13.410264 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:13 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:13 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:13 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:13 crc 
kubenswrapper[4912]: I1208 21:21:13.410384 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:14 crc kubenswrapper[4912]: I1208 21:21:14.206482 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"eaa43cc1-7b37-4711-a833-d83c5d66a6e5","Type":"ContainerStarted","Data":"141e0f35acf48649392b76965924cf104645447acaf11510659b2cc8c0f6a348"} Dec 08 21:21:14 crc kubenswrapper[4912]: I1208 21:21:14.355271 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.355248495 podStartE2EDuration="4.355248495s" podCreationTimestamp="2025-12-08 21:21:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:14.354415418 +0000 UTC m=+156.217417511" watchObservedRunningTime="2025-12-08 21:21:14.355248495 +0000 UTC m=+156.218250588" Dec 08 21:21:14 crc kubenswrapper[4912]: I1208 21:21:14.410312 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:14 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:14 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:14 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:14 crc kubenswrapper[4912]: I1208 21:21:14.410443 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:15 crc kubenswrapper[4912]: I1208 21:21:15.413304 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:15 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:15 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:15 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:15 crc kubenswrapper[4912]: I1208 21:21:15.413730 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.232549 4912 generic.go:334] "Generic (PLEG): container finished" podID="eaa43cc1-7b37-4711-a833-d83c5d66a6e5" containerID="141e0f35acf48649392b76965924cf104645447acaf11510659b2cc8c0f6a348" exitCode=0 Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.232638 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"eaa43cc1-7b37-4711-a833-d83c5d66a6e5","Type":"ContainerDied","Data":"141e0f35acf48649392b76965924cf104645447acaf11510659b2cc8c0f6a348"} Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.411094 4912 patch_prober.go:28] interesting 
pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:16 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:16 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:16 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.411162 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.703984 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.704112 4912 patch_prober.go:28] interesting pod/downloads-7954f5f757-8rkpr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.704168 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:16 crc kubenswrapper[4912]: I1208 21:21:16.704201 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8rkpr" podUID="b17a753b-7754-47a9-8432-0f1fab0fb3be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 08 21:21:17 crc kubenswrapper[4912]: I1208 21:21:17.390000 4912 patch_prober.go:28] interesting pod/console-f9d7485db-46knq container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 08 21:21:17 crc kubenswrapper[4912]: I1208 21:21:17.390387 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-46knq" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 08 21:21:17 crc kubenswrapper[4912]: I1208 21:21:17.409681 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:17 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:17 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:17 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:17 crc kubenswrapper[4912]: I1208 21:21:17.410003 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" 
containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:18 crc kubenswrapper[4912]: I1208 21:21:18.511222 4912 patch_prober.go:28] interesting pod/router-default-5444994796-b77mr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 21:21:18 crc kubenswrapper[4912]: [-]has-synced failed: reason withheld Dec 08 21:21:18 crc kubenswrapper[4912]: [+]process-running ok Dec 08 21:21:18 crc kubenswrapper[4912]: healthz check failed Dec 08 21:21:18 crc kubenswrapper[4912]: I1208 21:21:18.511638 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b77mr" podUID="0fb52c9e-bb73-483a-a064-b1bf85f5e901" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.438552 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.541809 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.542747 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.543265 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f259abd-9b12-458f-975d-68996ae1265c-metrics-certs\") pod \"network-metrics-daemon-lhjln\" (UID: \"6f259abd-9b12-458f-975d-68996ae1265c\") " pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.595224 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.613821 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-b77mr" Dec 08 21:21:19 crc kubenswrapper[4912]: I1208 21:21:19.636354 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lhjln" Dec 08 21:21:26 crc kubenswrapper[4912]: I1208 21:21:26.749953 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-8rkpr" Dec 08 21:21:27 crc kubenswrapper[4912]: I1208 21:21:27.004350 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:21:27 crc kubenswrapper[4912]: I1208 21:21:27.391992 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:21:27 crc kubenswrapper[4912]: I1208 21:21:27.398448 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:21:32 crc kubenswrapper[4912]: I1208 21:21:32.964844 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:21:32 crc kubenswrapper[4912]: I1208 21:21:32.965287 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.572202 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.577722 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.687184 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir\") pod \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.687758 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir\") pod \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.687318 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4cef5b95-055b-4069-b2bf-65ef8665c9f8" (UID: "4cef5b95-055b-4069-b2bf-65ef8665c9f8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.687843 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "eaa43cc1-7b37-4711-a833-d83c5d66a6e5" (UID: "eaa43cc1-7b37-4711-a833-d83c5d66a6e5"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.687930 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access\") pod \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\" (UID: \"4cef5b95-055b-4069-b2bf-65ef8665c9f8\") " Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.688147 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access\") pod \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\" (UID: \"eaa43cc1-7b37-4711-a833-d83c5d66a6e5\") " Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.689028 4912 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.689086 4912 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.696679 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "eaa43cc1-7b37-4711-a833-d83c5d66a6e5" (UID: "eaa43cc1-7b37-4711-a833-d83c5d66a6e5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.698587 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4cef5b95-055b-4069-b2bf-65ef8665c9f8" (UID: "4cef5b95-055b-4069-b2bf-65ef8665c9f8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.790938 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cef5b95-055b-4069-b2bf-65ef8665c9f8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.791453 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa43cc1-7b37-4711-a833-d83c5d66a6e5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.809435 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"eaa43cc1-7b37-4711-a833-d83c5d66a6e5","Type":"ContainerDied","Data":"8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9"} Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.809496 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8afef0cc7e872928defb500802bf6ad707a4c2359c48e7d5a21a026e57a8e9a9" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.809573 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.812224 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cef5b95-055b-4069-b2bf-65ef8665c9f8","Type":"ContainerDied","Data":"3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d"} Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.812275 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 21:21:33 crc kubenswrapper[4912]: I1208 21:21:33.812276 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3857d0ceb481860c7e2ba59a247e26e72b8e3c0a4aa54a0c2595b3254e639c6d" Dec 08 21:21:38 crc kubenswrapper[4912]: I1208 21:21:38.191678 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ct9x7" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.344344 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609: Get \"https://registry.redhat.io/v2/redhat/certified-operator-index/blobs/sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609\": context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.345109 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zm64g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2k2fc_openshift-marketplace(570435e6-b620-4b1c-8f4b-47b36f3bee5e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609: Get 
\"https://registry.redhat.io/v2/redhat/certified-operator-index/blobs/sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609\": context canceled" logger="UnhandledError" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.346312 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609: Get \\\"https://registry.redhat.io/v2/redhat/certified-operator-index/blobs/sha256:758140d0f48f2c4b1fd270456aaec990b7301faaec10de81de1eeb6a74ea7609\\\": context canceled\"" pod="openshift-marketplace/certified-operators-2k2fc" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.417480 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.418173 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nntdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-2x2xd_openshift-marketplace(c56535e8-663a-43a9-b596-79a4d3ac0403): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:21:39 crc kubenswrapper[4912]: E1208 21:21:39.419632 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-2x2xd" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" Dec 08 21:21:39 crc kubenswrapper[4912]: I1208 21:21:39.825865 4912 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-multus/network-metrics-daemon-lhjln"] Dec 08 21:21:42 crc kubenswrapper[4912]: E1208 21:21:42.024166 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2k2fc" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" Dec 08 21:21:42 crc kubenswrapper[4912]: E1208 21:21:42.025504 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-2x2xd" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" Dec 08 21:21:42 crc kubenswrapper[4912]: E1208 21:21:42.050056 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 21:21:42 crc kubenswrapper[4912]: E1208 21:21:42.050712 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kfc8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pk9xl_openshift-marketplace(6f4e34c7-0a83-44db-8ee4-b7ccac967bc9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:21:42 crc kubenswrapper[4912]: E1208 21:21:42.052066 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-pk9xl" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" Dec 08 21:21:42 crc kubenswrapper[4912]: I1208 21:21:42.870043 4912 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lhjln" event={"ID":"6f259abd-9b12-458f-975d-68996ae1265c","Type":"ContainerStarted","Data":"5269b8691419dc938679cf36bd667f589f6e79f95879fca34cde9df05b0e7bc9"} Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.590648 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:21:44 crc kubenswrapper[4912]: E1208 21:21:44.592762 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa43cc1-7b37-4711-a833-d83c5d66a6e5" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.592843 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa43cc1-7b37-4711-a833-d83c5d66a6e5" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: E1208 21:21:44.592910 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cef5b95-055b-4069-b2bf-65ef8665c9f8" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.592970 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cef5b95-055b-4069-b2bf-65ef8665c9f8" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.593160 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cef5b95-055b-4069-b2bf-65ef8665c9f8" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.593226 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaa43cc1-7b37-4711-a833-d83c5d66a6e5" containerName="pruner" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.593734 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.596246 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.600885 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.601566 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.772729 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.772806 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.873750 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 
21:21:44.873855 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.874328 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.894771 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:44 crc kubenswrapper[4912]: I1208 21:21:44.934348 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:45 crc kubenswrapper[4912]: I1208 21:21:45.545187 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 21:21:45 crc kubenswrapper[4912]: E1208 21:21:45.833248 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk9xl" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" Dec 08 21:21:47 crc kubenswrapper[4912]: E1208 21:21:47.224188 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 08 21:21:47 crc kubenswrapper[4912]: E1208 21:21:47.224417 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5kmcq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-w779q_openshift-marketplace(7c2d0a46-44cd-4e4d-844c-99ab171020a8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:21:47 crc kubenswrapper[4912]: E1208 21:21:47.225669 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-w779q" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" Dec 08 21:21:47 crc kubenswrapper[4912]: I1208 21:21:47.633587 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 21:21:47 crc kubenswrapper[4912]: W1208 21:21:47.641050 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod28edb1a1_2bc6_48a1_b115_08ada97d1e4d.slice/crio-45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224 WatchSource:0}: Error finding container 45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224: Status 404 returned error can't find the container with id 45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224 Dec 08 21:21:47 crc kubenswrapper[4912]: I1208 21:21:47.973878 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerStarted","Data":"8c7e772e2b96605fed8c87df1885ba2da376da5d4e0d59b7b5fa564ff82eacee"} Dec 08 21:21:47 crc kubenswrapper[4912]: I1208 21:21:47.994973 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerStarted","Data":"ec6e279a4c480d8f113079918cc9353a22c7e977220f7485ace6d62507b543ae"} Dec 08 21:21:48 crc kubenswrapper[4912]: I1208 21:21:48.000292 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" 
event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerStarted","Data":"7439640f3253238f92481d2c93e0fc41e36c6ae6b559e137af1298e73d3832c2"} Dec 08 21:21:48 crc kubenswrapper[4912]: I1208 21:21:48.005527 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerStarted","Data":"afe18c93fc3746fbd208875cda1920f65e6787e8f404218e8bd436d33dd9b96b"} Dec 08 21:21:48 crc kubenswrapper[4912]: I1208 21:21:48.022679 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"28edb1a1-2bc6-48a1-b115-08ada97d1e4d","Type":"ContainerStarted","Data":"45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224"} Dec 08 21:21:48 crc kubenswrapper[4912]: I1208 21:21:48.051326 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lhjln" event={"ID":"6f259abd-9b12-458f-975d-68996ae1265c","Type":"ContainerStarted","Data":"b94cc975abfba4058711492f93e42efd0a06e45fd1dff16de8eb41f6c9cc37b4"} Dec 08 21:21:48 crc kubenswrapper[4912]: E1208 21:21:48.058835 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-w779q" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.057240 4912 generic.go:334] "Generic (PLEG): container finished" podID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerID="afe18c93fc3746fbd208875cda1920f65e6787e8f404218e8bd436d33dd9b96b" exitCode=0 Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.057600 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerDied","Data":"afe18c93fc3746fbd208875cda1920f65e6787e8f404218e8bd436d33dd9b96b"} Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.061182 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"28edb1a1-2bc6-48a1-b115-08ada97d1e4d","Type":"ContainerStarted","Data":"fde3d2fdfcd6426f55f2f3944c93c240a982a183e1a39fc9935a472d6db8ab5a"} Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.073286 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lhjln" event={"ID":"6f259abd-9b12-458f-975d-68996ae1265c","Type":"ContainerStarted","Data":"6104108b9e4f5039ddcc04f4d394cbacd6096fbe9132782e7ae38d112c877bfc"} Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.075147 4912 generic.go:334] "Generic (PLEG): container finished" podID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerID="ec6e279a4c480d8f113079918cc9353a22c7e977220f7485ace6d62507b543ae" exitCode=0 Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.075286 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerDied","Data":"ec6e279a4c480d8f113079918cc9353a22c7e977220f7485ace6d62507b543ae"} Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.107433 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=5.107406546 
podStartE2EDuration="5.107406546s" podCreationTimestamp="2025-12-08 21:21:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:49.104990228 +0000 UTC m=+190.967992311" watchObservedRunningTime="2025-12-08 21:21:49.107406546 +0000 UTC m=+190.970408629" Dec 08 21:21:49 crc kubenswrapper[4912]: I1208 21:21:49.130572 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-lhjln" podStartSLOduration=172.130551023 podStartE2EDuration="2m52.130551023s" podCreationTimestamp="2025-12-08 21:18:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:49.130105413 +0000 UTC m=+190.993107536" watchObservedRunningTime="2025-12-08 21:21:49.130551023 +0000 UTC m=+190.993553106" Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.084126 4912 generic.go:334] "Generic (PLEG): container finished" podID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerID="7439640f3253238f92481d2c93e0fc41e36c6ae6b559e137af1298e73d3832c2" exitCode=0 Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.084246 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerDied","Data":"7439640f3253238f92481d2c93e0fc41e36c6ae6b559e137af1298e73d3832c2"} Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.092365 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerStarted","Data":"6481f14ae568d999917bd2f5dd579a77ae7d8accbca35d11b31f8523eaf89c74"} Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.098803 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerStarted","Data":"8929b262f7c1a2da65b5091b9c1a0f61d1a26f6903d79f85c11d68579fb78210"} Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.121646 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hgl2c" podStartSLOduration=5.374094299 podStartE2EDuration="46.121621113s" podCreationTimestamp="2025-12-08 21:21:04 +0000 UTC" firstStartedPulling="2025-12-08 21:21:08.917131958 +0000 UTC m=+150.780134041" lastFinishedPulling="2025-12-08 21:21:49.664658772 +0000 UTC m=+191.527660855" observedRunningTime="2025-12-08 21:21:50.118556229 +0000 UTC m=+191.981558312" watchObservedRunningTime="2025-12-08 21:21:50.121621113 +0000 UTC m=+191.984623196" Dec 08 21:21:50 crc kubenswrapper[4912]: I1208 21:21:50.179939 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8p8tl" podStartSLOduration=8.81828747 podStartE2EDuration="45.179913297s" podCreationTimestamp="2025-12-08 21:21:05 +0000 UTC" firstStartedPulling="2025-12-08 21:21:12.163762757 +0000 UTC m=+154.026764840" lastFinishedPulling="2025-12-08 21:21:48.525388584 +0000 UTC m=+190.388390667" observedRunningTime="2025-12-08 21:21:50.174798833 +0000 UTC m=+192.037800916" watchObservedRunningTime="2025-12-08 21:21:50.179913297 +0000 UTC m=+192.042915380" Dec 08 21:21:50 crc kubenswrapper[4912]: E1208 21:21:50.442114 4912 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f8e72c9_ed46_4394_b36c_64ae0b735077.slice/crio-conmon-8c7e772e2b96605fed8c87df1885ba2da376da5d4e0d59b7b5fa564ff82eacee.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.104280 4912 generic.go:334] "Generic (PLEG): container finished" podID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerID="8c7e772e2b96605fed8c87df1885ba2da376da5d4e0d59b7b5fa564ff82eacee" exitCode=0 Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.104463 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerDied","Data":"8c7e772e2b96605fed8c87df1885ba2da376da5d4e0d59b7b5fa564ff82eacee"} Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.109170 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerStarted","Data":"a0fadcbf89dc836c83019ef396e89af7dd2bd7912cf2a611c205b93334fc56d5"} Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.112196 4912 generic.go:334] "Generic (PLEG): container finished" podID="28edb1a1-2bc6-48a1-b115-08ada97d1e4d" containerID="fde3d2fdfcd6426f55f2f3944c93c240a982a183e1a39fc9935a472d6db8ab5a" exitCode=0 Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.113238 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"28edb1a1-2bc6-48a1-b115-08ada97d1e4d","Type":"ContainerDied","Data":"fde3d2fdfcd6426f55f2f3944c93c240a982a183e1a39fc9935a472d6db8ab5a"} Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.167358 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7nhqb" podStartSLOduration=6.829589944 podStartE2EDuration="45.167336638s" podCreationTimestamp="2025-12-08 21:21:06 +0000 UTC" firstStartedPulling="2025-12-08 21:21:12.175803415 +0000 UTC m=+154.038805498" lastFinishedPulling="2025-12-08 21:21:50.513550109 +0000 UTC m=+192.376552192" observedRunningTime="2025-12-08 21:21:51.163209218 +0000 UTC m=+193.026211321" watchObservedRunningTime="2025-12-08 21:21:51.167336638 +0000 UTC m=+193.030338721" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.391361 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.392392 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.447987 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.585807 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.585871 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.585890 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.686512 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.686562 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.686586 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.686668 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.686703 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock\") pod \"installer-9-crc\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:51 crc kubenswrapper[4912]: I1208 21:21:51.708227 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"16751926-fba8-4f6d-9e55-09ed0751ff06\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.008380 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.148015 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerStarted","Data":"7e1f4fd4a8f2f7f71c8cd04d5a35636dc0fc97ff2837f981c6a1ad5ae96ede9f"} Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.178097 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tc2h7" podStartSLOduration=5.813260444 podStartE2EDuration="45.178069571s" podCreationTimestamp="2025-12-08 21:21:07 +0000 UTC" firstStartedPulling="2025-12-08 21:21:12.163522142 +0000 UTC m=+154.026524225" lastFinishedPulling="2025-12-08 21:21:51.528331259 +0000 UTC m=+193.391333352" observedRunningTime="2025-12-08 21:21:52.17431142 +0000 UTC m=+194.037313513" watchObservedRunningTime="2025-12-08 21:21:52.178069571 +0000 UTC m=+194.041071664" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.474941 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 21:21:52 crc kubenswrapper[4912]: W1208 21:21:52.475935 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod16751926_fba8_4f6d_9e55_09ed0751ff06.slice/crio-c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8 WatchSource:0}: Error finding container c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8: Status 404 returned error can't find the container with id c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8 Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.533931 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.717839 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir\") pod \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.717995 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access\") pod \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\" (UID: \"28edb1a1-2bc6-48a1-b115-08ada97d1e4d\") " Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.718315 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "28edb1a1-2bc6-48a1-b115-08ada97d1e4d" (UID: "28edb1a1-2bc6-48a1-b115-08ada97d1e4d"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.724935 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "28edb1a1-2bc6-48a1-b115-08ada97d1e4d" (UID: "28edb1a1-2bc6-48a1-b115-08ada97d1e4d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.819952 4912 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:52 crc kubenswrapper[4912]: I1208 21:21:52.820522 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28edb1a1-2bc6-48a1-b115-08ada97d1e4d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:21:53 crc kubenswrapper[4912]: I1208 21:21:53.154467 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"28edb1a1-2bc6-48a1-b115-08ada97d1e4d","Type":"ContainerDied","Data":"45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224"} Dec 08 21:21:53 crc kubenswrapper[4912]: I1208 21:21:53.154531 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45a4e8e40a0618ccbb4aba74c95b1fdebd0d2e7da5c2c76c6600b1bac723a224" Dec 08 21:21:53 crc kubenswrapper[4912]: I1208 21:21:53.154614 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 21:21:53 crc kubenswrapper[4912]: I1208 21:21:53.159114 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16751926-fba8-4f6d-9e55-09ed0751ff06","Type":"ContainerStarted","Data":"c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8"} Dec 08 21:21:54 crc kubenswrapper[4912]: I1208 21:21:54.173431 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16751926-fba8-4f6d-9e55-09ed0751ff06","Type":"ContainerStarted","Data":"4a8f0b017a79c5487642df145c3564d39c605004a05f680b9b9e51bac2a299f5"} Dec 08 21:21:54 crc kubenswrapper[4912]: I1208 21:21:54.197610 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.197590781 podStartE2EDuration="3.197590781s" podCreationTimestamp="2025-12-08 21:21:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:21:54.193858091 +0000 UTC m=+196.056860174" watchObservedRunningTime="2025-12-08 21:21:54.197590781 +0000 UTC m=+196.060592864" Dec 08 21:21:54 crc kubenswrapper[4912]: I1208 21:21:54.444333 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 21:21:54 crc kubenswrapper[4912]: I1208 21:21:54.444389 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.067318 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 
21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.125431 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.330369 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.331467 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.373370 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:56 crc kubenswrapper[4912]: I1208 21:21:56.640171 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.233531 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.619717 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.619908 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.660793 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.827327 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hgl2c"] Dec 08 21:21:57 crc kubenswrapper[4912]: I1208 21:21:57.827675 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hgl2c" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="registry-server" containerID="cri-o://6481f14ae568d999917bd2f5dd579a77ae7d8accbca35d11b31f8523eaf89c74" gracePeriod=2 Dec 08 21:21:58 crc kubenswrapper[4912]: I1208 21:21:58.167439 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:58 crc kubenswrapper[4912]: I1208 21:21:58.167520 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:58 crc kubenswrapper[4912]: I1208 21:21:58.209397 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:58 crc kubenswrapper[4912]: I1208 21:21:58.240761 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:21:58 crc kubenswrapper[4912]: I1208 21:21:58.526546 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:21:59 crc kubenswrapper[4912]: I1208 21:21:59.203331 4912 generic.go:334] "Generic (PLEG): container finished" podID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerID="6481f14ae568d999917bd2f5dd579a77ae7d8accbca35d11b31f8523eaf89c74" exitCode=0 Dec 08 21:21:59 crc kubenswrapper[4912]: I1208 21:21:59.203417 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerDied","Data":"6481f14ae568d999917bd2f5dd579a77ae7d8accbca35d11b31f8523eaf89c74"} Dec 08 21:22:00 crc kubenswrapper[4912]: I1208 21:22:00.228613 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:22:00 crc kubenswrapper[4912]: I1208 21:22:00.229327 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tc2h7" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="registry-server" containerID="cri-o://7e1f4fd4a8f2f7f71c8cd04d5a35636dc0fc97ff2837f981c6a1ad5ae96ede9f" gracePeriod=2 Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.217359 4912 generic.go:334] "Generic (PLEG): container finished" podID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerID="7e1f4fd4a8f2f7f71c8cd04d5a35636dc0fc97ff2837f981c6a1ad5ae96ede9f" exitCode=0 Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.217435 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerDied","Data":"7e1f4fd4a8f2f7f71c8cd04d5a35636dc0fc97ff2837f981c6a1ad5ae96ede9f"} Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.660647 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.850613 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities\") pod \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.850749 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content\") pod \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.850782 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lsds\" (UniqueName: \"kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds\") pod \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\" (UID: \"139572d3-34d8-4dc2-ac47-d2597bdfb3ec\") " Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.851676 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities" (OuterVolumeSpecName: "utilities") pod "139572d3-34d8-4dc2-ac47-d2597bdfb3ec" (UID: "139572d3-34d8-4dc2-ac47-d2597bdfb3ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.855557 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds" (OuterVolumeSpecName: "kube-api-access-6lsds") pod "139572d3-34d8-4dc2-ac47-d2597bdfb3ec" (UID: "139572d3-34d8-4dc2-ac47-d2597bdfb3ec"). InnerVolumeSpecName "kube-api-access-6lsds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.951933 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lsds\" (UniqueName: \"kubernetes.io/projected/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-kube-api-access-6lsds\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.951973 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:01 crc kubenswrapper[4912]: I1208 21:22:01.986301 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "139572d3-34d8-4dc2-ac47-d2597bdfb3ec" (UID: "139572d3-34d8-4dc2-ac47-d2597bdfb3ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.053226 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139572d3-34d8-4dc2-ac47-d2597bdfb3ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.227410 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgl2c" event={"ID":"139572d3-34d8-4dc2-ac47-d2597bdfb3ec","Type":"ContainerDied","Data":"95e47dd73e3fd7cda2c323c078a358b68b13d226728e40a132cbc20e283dbd53"} Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.227452 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hgl2c" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.227499 4912 scope.go:117] "RemoveContainer" containerID="6481f14ae568d999917bd2f5dd579a77ae7d8accbca35d11b31f8523eaf89c74" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.232333 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tc2h7" event={"ID":"2f8e72c9-ed46-4394-b36c-64ae0b735077","Type":"ContainerDied","Data":"cb5f9e7fe9810fabf646fca6f77f1bb84436969e60a8fadeff22df32fffe0b14"} Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.232449 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb5f9e7fe9810fabf646fca6f77f1bb84436969e60a8fadeff22df32fffe0b14" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.266842 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.285626 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hgl2c"] Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.294600 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hgl2c"] Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.434473 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" path="/var/lib/kubelet/pods/139572d3-34d8-4dc2-ac47-d2597bdfb3ec/volumes" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.458315 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content\") pod \"2f8e72c9-ed46-4394-b36c-64ae0b735077\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.458527 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities\") pod \"2f8e72c9-ed46-4394-b36c-64ae0b735077\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.458630 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv9hg\" (UniqueName: \"kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg\") pod \"2f8e72c9-ed46-4394-b36c-64ae0b735077\" (UID: \"2f8e72c9-ed46-4394-b36c-64ae0b735077\") " Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.459338 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities" (OuterVolumeSpecName: "utilities") pod "2f8e72c9-ed46-4394-b36c-64ae0b735077" (UID: "2f8e72c9-ed46-4394-b36c-64ae0b735077"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.464336 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg" (OuterVolumeSpecName: "kube-api-access-hv9hg") pod "2f8e72c9-ed46-4394-b36c-64ae0b735077" (UID: "2f8e72c9-ed46-4394-b36c-64ae0b735077"). InnerVolumeSpecName "kube-api-access-hv9hg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.561168 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv9hg\" (UniqueName: \"kubernetes.io/projected/2f8e72c9-ed46-4394-b36c-64ae0b735077-kube-api-access-hv9hg\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.561232 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.965237 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.965343 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.965416 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.966689 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:22:02 crc kubenswrapper[4912]: I1208 21:22:02.967016 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed" gracePeriod=600 Dec 08 21:22:03 crc kubenswrapper[4912]: I1208 21:22:03.237678 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tc2h7" Dec 08 21:22:03 crc kubenswrapper[4912]: I1208 21:22:03.414116 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f8e72c9-ed46-4394-b36c-64ae0b735077" (UID: "2f8e72c9-ed46-4394-b36c-64ae0b735077"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:03 crc kubenswrapper[4912]: I1208 21:22:03.475977 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f8e72c9-ed46-4394-b36c-64ae0b735077-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:03 crc kubenswrapper[4912]: I1208 21:22:03.565841 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:22:03 crc kubenswrapper[4912]: I1208 21:22:03.569462 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tc2h7"] Dec 08 21:22:04 crc kubenswrapper[4912]: I1208 21:22:04.438191 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" path="/var/lib/kubelet/pods/2f8e72c9-ed46-4394-b36c-64ae0b735077/volumes" Dec 08 21:22:05 crc kubenswrapper[4912]: I1208 21:22:05.253684 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed" exitCode=0 Dec 08 21:22:05 crc kubenswrapper[4912]: I1208 21:22:05.253748 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed"} Dec 08 21:22:06 crc kubenswrapper[4912]: I1208 21:22:06.057660 4912 scope.go:117] "RemoveContainer" containerID="afe18c93fc3746fbd208875cda1920f65e6787e8f404218e8bd436d33dd9b96b" Dec 08 21:22:06 crc kubenswrapper[4912]: I1208 21:22:06.718876 4912 scope.go:117] "RemoveContainer" containerID="49980ca768af985b963c50d3f61a3451f9f85da28dd8125ed3110e0dee961606" Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.277373 4912 generic.go:334] "Generic (PLEG): container finished" podID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerID="f50ca9c38c9229d387ed4eef5040abdbbf10482c43ed2a5cd84741222cc335bd" exitCode=0 Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.277393 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerDied","Data":"f50ca9c38c9229d387ed4eef5040abdbbf10482c43ed2a5cd84741222cc335bd"} Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.280990 4912 generic.go:334] "Generic (PLEG): container finished" podID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerID="32f8d40cf7223d47fd8efd65544ddc68d2d95ab53d18a8e7c7f70f5beb2203ae" exitCode=0 Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.281079 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerDied","Data":"32f8d40cf7223d47fd8efd65544ddc68d2d95ab53d18a8e7c7f70f5beb2203ae"} Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.285158 4912 generic.go:334] "Generic (PLEG): container finished" podID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerID="84b066d4828075bbce39a5ac3c27cd38321a9a49c5fec894a04658847c0c4127" exitCode=0 Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.285265 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" 
event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerDied","Data":"84b066d4828075bbce39a5ac3c27cd38321a9a49c5fec894a04658847c0c4127"} Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.288856 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960"} Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.291316 4912 generic.go:334] "Generic (PLEG): container finished" podID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerID="8dc7c0881dbec88e4158a433c0edef79ea24cfd9e20c4adb7f24ac90bede5c8a" exitCode=0 Dec 08 21:22:08 crc kubenswrapper[4912]: I1208 21:22:08.291367 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerDied","Data":"8dc7c0881dbec88e4158a433c0edef79ea24cfd9e20c4adb7f24ac90bede5c8a"} Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.299711 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerStarted","Data":"8196c14848c2a06ae98243d876ca5d385218d4d6c66a920fe45ddc634d49bea7"} Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.302286 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerStarted","Data":"9e7b810ec7a0ad03248688550362fa1906d0e54430c7f1d8b78717ea64115ca0"} Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.304550 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerStarted","Data":"ff9b26030fbd2747a1d60dfe49ae1c286369d718dbcf641b7bbc448b20fcbab3"} Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.306820 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerStarted","Data":"271b5d86ee2f8cbcfd548439f502d51f93173c17221151f467ce4e2e2f775f43"} Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.332101 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w779q" podStartSLOduration=6.84325094 podStartE2EDuration="1m3.332076661s" podCreationTimestamp="2025-12-08 21:21:06 +0000 UTC" firstStartedPulling="2025-12-08 21:21:12.17070477 +0000 UTC m=+154.033706853" lastFinishedPulling="2025-12-08 21:22:08.659530491 +0000 UTC m=+210.522532574" observedRunningTime="2025-12-08 21:22:09.330634187 +0000 UTC m=+211.193636270" watchObservedRunningTime="2025-12-08 21:22:09.332076661 +0000 UTC m=+211.195078744" Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.348210 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pk9xl" podStartSLOduration=5.569869497 podStartE2EDuration="1m5.348189224s" podCreationTimestamp="2025-12-08 21:21:04 +0000 UTC" firstStartedPulling="2025-12-08 21:21:08.916687448 +0000 UTC m=+150.779689531" lastFinishedPulling="2025-12-08 21:22:08.695007175 +0000 UTC m=+210.558009258" observedRunningTime="2025-12-08 21:22:09.346919254 +0000 UTC m=+211.209921347" 
watchObservedRunningTime="2025-12-08 21:22:09.348189224 +0000 UTC m=+211.211191307" Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.370352 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2x2xd" podStartSLOduration=3.924983751 podStartE2EDuration="1m6.370324161s" podCreationTimestamp="2025-12-08 21:21:03 +0000 UTC" firstStartedPulling="2025-12-08 21:21:06.414537257 +0000 UTC m=+148.277539340" lastFinishedPulling="2025-12-08 21:22:08.859877667 +0000 UTC m=+210.722879750" observedRunningTime="2025-12-08 21:22:09.365804993 +0000 UTC m=+211.228807086" watchObservedRunningTime="2025-12-08 21:22:09.370324161 +0000 UTC m=+211.233326244" Dec 08 21:22:09 crc kubenswrapper[4912]: I1208 21:22:09.387280 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2k2fc" podStartSLOduration=6.54012925 podStartE2EDuration="1m6.387260314s" podCreationTimestamp="2025-12-08 21:21:03 +0000 UTC" firstStartedPulling="2025-12-08 21:21:08.916865192 +0000 UTC m=+150.779867275" lastFinishedPulling="2025-12-08 21:22:08.763996256 +0000 UTC m=+210.626998339" observedRunningTime="2025-12-08 21:22:09.383439343 +0000 UTC m=+211.246441416" watchObservedRunningTime="2025-12-08 21:22:09.387260314 +0000 UTC m=+211.250262397" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.215573 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.216343 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.269736 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.380016 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.403250 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.403331 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.503860 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.745327 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.745699 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:14 crc kubenswrapper[4912]: I1208 21:22:14.781987 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:15 crc kubenswrapper[4912]: I1208 21:22:15.395420 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:22:15 crc kubenswrapper[4912]: I1208 21:22:15.425673 4912 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:16 crc kubenswrapper[4912]: I1208 21:22:16.638772 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:16 crc kubenswrapper[4912]: I1208 21:22:16.639110 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:16 crc kubenswrapper[4912]: I1208 21:22:16.692119 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:16 crc kubenswrapper[4912]: I1208 21:22:16.832880 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"] Dec 08 21:22:17 crc kubenswrapper[4912]: I1208 21:22:17.407761 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:18 crc kubenswrapper[4912]: I1208 21:22:18.364957 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pk9xl" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="registry-server" containerID="cri-o://9e7b810ec7a0ad03248688550362fa1906d0e54430c7f1d8b78717ea64115ca0" gracePeriod=2 Dec 08 21:22:19 crc kubenswrapper[4912]: I1208 21:22:19.035904 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:22:19 crc kubenswrapper[4912]: I1208 21:22:19.369636 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w779q" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="registry-server" containerID="cri-o://8196c14848c2a06ae98243d876ca5d385218d4d6c66a920fe45ddc634d49bea7" gracePeriod=2 Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.378651 4912 generic.go:334] "Generic (PLEG): container finished" podID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerID="9e7b810ec7a0ad03248688550362fa1906d0e54430c7f1d8b78717ea64115ca0" exitCode=0 Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.378757 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerDied","Data":"9e7b810ec7a0ad03248688550362fa1906d0e54430c7f1d8b78717ea64115ca0"} Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.381464 4912 generic.go:334] "Generic (PLEG): container finished" podID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerID="8196c14848c2a06ae98243d876ca5d385218d4d6c66a920fe45ddc634d49bea7" exitCode=0 Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.381536 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerDied","Data":"8196c14848c2a06ae98243d876ca5d385218d4d6c66a920fe45ddc634d49bea7"} Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.581557 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.725619 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfc8t\" (UniqueName: \"kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t\") pod \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.725683 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities\") pod \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.725707 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content\") pod \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\" (UID: \"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9\") " Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.728145 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities" (OuterVolumeSpecName: "utilities") pod "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" (UID: "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.747185 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t" (OuterVolumeSpecName: "kube-api-access-kfc8t") pod "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" (UID: "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9"). InnerVolumeSpecName "kube-api-access-kfc8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.776946 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" (UID: "6f4e34c7-0a83-44db-8ee4-b7ccac967bc9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.826553 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfc8t\" (UniqueName: \"kubernetes.io/projected/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-kube-api-access-kfc8t\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.826595 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.826606 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:20 crc kubenswrapper[4912]: I1208 21:22:20.943387 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.130741 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities\") pod \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.130966 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kmcq\" (UniqueName: \"kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq\") pod \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.131107 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content\") pod \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\" (UID: \"7c2d0a46-44cd-4e4d-844c-99ab171020a8\") " Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.131691 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities" (OuterVolumeSpecName: "utilities") pod "7c2d0a46-44cd-4e4d-844c-99ab171020a8" (UID: "7c2d0a46-44cd-4e4d-844c-99ab171020a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.136555 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq" (OuterVolumeSpecName: "kube-api-access-5kmcq") pod "7c2d0a46-44cd-4e4d-844c-99ab171020a8" (UID: "7c2d0a46-44cd-4e4d-844c-99ab171020a8"). InnerVolumeSpecName "kube-api-access-5kmcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.151028 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c2d0a46-44cd-4e4d-844c-99ab171020a8" (UID: "7c2d0a46-44cd-4e4d-844c-99ab171020a8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.232553 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kmcq\" (UniqueName: \"kubernetes.io/projected/7c2d0a46-44cd-4e4d-844c-99ab171020a8-kube-api-access-5kmcq\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.232594 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.232608 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c2d0a46-44cd-4e4d-844c-99ab171020a8-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.395229 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w779q" event={"ID":"7c2d0a46-44cd-4e4d-844c-99ab171020a8","Type":"ContainerDied","Data":"6ed10000db890c730abbfcc1f181616da89c12e38b573b52a9160884aa59fa58"} Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.395272 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w779q" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.395296 4912 scope.go:117] "RemoveContainer" containerID="8196c14848c2a06ae98243d876ca5d385218d4d6c66a920fe45ddc634d49bea7" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.400104 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk9xl" event={"ID":"6f4e34c7-0a83-44db-8ee4-b7ccac967bc9","Type":"ContainerDied","Data":"7c81e1d96938580f89dff423fa095e1e73449bdd6929fcb15a710474eba1905c"} Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.400190 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pk9xl" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.428815 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.431361 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w779q"] Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.432905 4912 scope.go:117] "RemoveContainer" containerID="8dc7c0881dbec88e4158a433c0edef79ea24cfd9e20c4adb7f24ac90bede5c8a" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.445128 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"] Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.447989 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pk9xl"] Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.471760 4912 scope.go:117] "RemoveContainer" containerID="5babc46824a3cfa6aeade81b93695f2cb83850927c770ed32fd9cd8bd7cc30e7" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.489533 4912 scope.go:117] "RemoveContainer" containerID="9e7b810ec7a0ad03248688550362fa1906d0e54430c7f1d8b78717ea64115ca0" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.504095 4912 scope.go:117] "RemoveContainer" containerID="f50ca9c38c9229d387ed4eef5040abdbbf10482c43ed2a5cd84741222cc335bd" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.517758 4912 scope.go:117] "RemoveContainer" containerID="5a7b13b061d872cc9187646fb6c4c9340017322c598f627c2691519d2960fc98" Dec 08 21:22:21 crc kubenswrapper[4912]: I1208 21:22:21.666881 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" podUID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" containerName="oauth-openshift" containerID="cri-o://1bcbae8b0dcd7437c23da7759fd09c13dcc620a750190682ba6496b449b01dd4" gracePeriod=15 Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.408943 4912 generic.go:334] "Generic (PLEG): container finished" podID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" containerID="1bcbae8b0dcd7437c23da7759fd09c13dcc620a750190682ba6496b449b01dd4" exitCode=0 Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.409097 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" event={"ID":"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21","Type":"ContainerDied","Data":"1bcbae8b0dcd7437c23da7759fd09c13dcc620a750190682ba6496b449b01dd4"} Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.437413 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" path="/var/lib/kubelet/pods/6f4e34c7-0a83-44db-8ee4-b7ccac967bc9/volumes" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.438367 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" path="/var/lib/kubelet/pods/7c2d0a46-44cd-4e4d-844c-99ab171020a8/volumes" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.769023 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956139 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956187 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8nkl\" (UniqueName: \"kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956215 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956236 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956256 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956278 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956312 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956353 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956381 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956407 4912 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956436 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956462 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956501 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.956554 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig\") pod \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\" (UID: \"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21\") " Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.957118 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.957604 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.958193 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.958968 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.959160 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.962443 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.962784 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.962781 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl" (OuterVolumeSpecName: "kube-api-access-t8nkl") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "kube-api-access-t8nkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.963102 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.969346 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.969563 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.970351 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.970665 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:22 crc kubenswrapper[4912]: I1208 21:22:22.970716 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" (UID: "c2e289bf-8031-4ba8-95d0-71e4fc9a4f21"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060552 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060635 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060677 4912 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060702 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060717 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060754 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060770 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060789 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060802 4912 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060838 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8nkl\" (UniqueName: \"kubernetes.io/projected/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-kube-api-access-t8nkl\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060854 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060872 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060887 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.060923 4912 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.420627 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" event={"ID":"c2e289bf-8031-4ba8-95d0-71e4fc9a4f21","Type":"ContainerDied","Data":"b0519213ccd8bac7314cf5f9de2d69da7aca13b8460647b58e22522bac3cd839"} Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.420706 4912 scope.go:117] "RemoveContainer" containerID="1bcbae8b0dcd7437c23da7759fd09c13dcc620a750190682ba6496b449b01dd4" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.421123 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wt7vh" Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.451010 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:22:23 crc kubenswrapper[4912]: I1208 21:22:23.459381 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wt7vh"] Dec 08 21:22:24 crc kubenswrapper[4912]: I1208 21:22:24.433755 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" path="/var/lib/kubelet/pods/c2e289bf-8031-4ba8-95d0-71e4fc9a4f21/volumes" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.363690 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-78788b65b9-6m54j"] Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364708 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364721 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364732 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364739 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364752 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364758 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364768 4912 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="28edb1a1-2bc6-48a1-b115-08ada97d1e4d" containerName="pruner" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364774 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="28edb1a1-2bc6-48a1-b115-08ada97d1e4d" containerName="pruner" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364784 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364790 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364799 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364805 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364814 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364822 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364836 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364842 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364853 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" containerName="oauth-openshift" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364860 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" containerName="oauth-openshift" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364874 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364885 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364897 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364905 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364914 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364921 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364930 4912 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364936 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="extract-content" Dec 08 21:22:29 crc kubenswrapper[4912]: E1208 21:22:29.364946 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.364952 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="extract-utilities" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365060 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e289bf-8031-4ba8-95d0-71e4fc9a4f21" containerName="oauth-openshift" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365073 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="139572d3-34d8-4dc2-ac47-d2597bdfb3ec" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365083 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c2d0a46-44cd-4e4d-844c-99ab171020a8" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365092 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8e72c9-ed46-4394-b36c-64ae0b735077" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365102 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f4e34c7-0a83-44db-8ee4-b7ccac967bc9" containerName="registry-server" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365112 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="28edb1a1-2bc6-48a1-b115-08ada97d1e4d" containerName="pruner" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.365579 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.371207 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.371461 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.373625 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.375172 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.375183 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.375463 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.375818 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.376050 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.376176 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.376310 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.376695 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.377234 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.382563 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.385923 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.395068 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-78788b65b9-6m54j"] Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.397337 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529442 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " 
pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529537 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2f4w\" (UniqueName: \"kubernetes.io/projected/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-kube-api-access-s2f4w\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529574 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-login\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529607 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529639 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529666 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529691 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-session\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529718 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-service-ca\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529741 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-error\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529766 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-policies\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529789 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-router-certs\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529824 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-serving-cert\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529850 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.529877 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-dir\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631677 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-login\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631740 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631762 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631781 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631805 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-session\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631823 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-service-ca\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631845 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-error\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631884 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-policies\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631906 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-router-certs\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631937 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-serving-cert\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631960 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-dir\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.631978 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.632027 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.632073 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2f4w\" (UniqueName: \"kubernetes.io/projected/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-kube-api-access-s2f4w\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.632755 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-service-ca\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.633049 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-dir\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.633552 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.634123 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-audit-policies\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.634592 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.640018 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-error\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.640251 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.643219 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-router-certs\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.643569 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-serving-cert\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.644115 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-login\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.647239 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.647899 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.649616 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-v4-0-config-system-session\") pod \"oauth-openshift-78788b65b9-6m54j\" 
(UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.653946 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2f4w\" (UniqueName: \"kubernetes.io/projected/1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903-kube-api-access-s2f4w\") pod \"oauth-openshift-78788b65b9-6m54j\" (UID: \"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903\") " pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:29 crc kubenswrapper[4912]: I1208 21:22:29.685961 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:30 crc kubenswrapper[4912]: I1208 21:22:30.116647 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-78788b65b9-6m54j"] Dec 08 21:22:30 crc kubenswrapper[4912]: I1208 21:22:30.465442 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" event={"ID":"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903","Type":"ContainerStarted","Data":"2f830ac9f466c190f405ad504311a5cbd1ffbbaa5616db31f11414fdd307609e"} Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.287218 4912 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.289253 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.289539 4912 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.289611 4912 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.289898 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8" gracePeriod=15 Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290339 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290369 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290385 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290393 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290403 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290409 4912 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290421 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290428 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290440 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290446 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290454 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290459 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.290467 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290474 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290591 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290611 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290620 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290629 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290635 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290643 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.290936 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b" gracePeriod=15 Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.291260 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47" gracePeriod=15 Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.291364 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e" gracePeriod=15 Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.291570 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2" gracePeriod=15 Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.459853 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.459935 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.459980 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.460179 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.460275 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.460302 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.460328 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.460352 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.471983 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" event={"ID":"1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903","Type":"ContainerStarted","Data":"ec6d1b48ce27344d29ea4de244997a1b0ecfdfbe13bb87fc6b9bad82ccd9d2d2"} Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.472857 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.473704 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.474447 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.479557 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.480080 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.480424 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.560946 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561017 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561078 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561308 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561379 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561443 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561498 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.561552 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.562317 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.562376 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.562409 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod 
\"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.562437 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.562720 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.563115 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.563150 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: I1208 21:22:31.563334 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.914229 4912 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.914624 4912 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.914970 4912 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.915517 4912 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.915908 4912 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:31 
crc kubenswrapper[4912]: I1208 21:22:31.915988 4912 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 08 21:22:31 crc kubenswrapper[4912]: E1208 21:22:31.916369 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="200ms" Dec 08 21:22:32 crc kubenswrapper[4912]: E1208 21:22:32.117482 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="400ms" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.211375 4912 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.211464 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.483449 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.486015 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.487205 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b" exitCode=0 Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.487277 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2" exitCode=0 Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.487304 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47" exitCode=0 Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.487360 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e" exitCode=2 Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.487359 4912 scope.go:117] "RemoveContainer" containerID="2e981588f3db5a497b2605d6cbd5436bb4a3579f05aab689f7780bf1af6ed206" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.490803 4912 generic.go:334] "Generic (PLEG): container finished" podID="16751926-fba8-4f6d-9e55-09ed0751ff06" containerID="4a8f0b017a79c5487642df145c3564d39c605004a05f680b9b9e51bac2a299f5" exitCode=0 Dec 08 21:22:32 crc 
kubenswrapper[4912]: I1208 21:22:32.490929 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16751926-fba8-4f6d-9e55-09ed0751ff06","Type":"ContainerDied","Data":"4a8f0b017a79c5487642df145c3564d39c605004a05f680b9b9e51bac2a299f5"} Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.492178 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:32 crc kubenswrapper[4912]: I1208 21:22:32.492601 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:32 crc kubenswrapper[4912]: E1208 21:22:32.518953 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="800ms" Dec 08 21:22:33 crc kubenswrapper[4912]: E1208 21:22:33.320836 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="1.6s" Dec 08 21:22:33 crc kubenswrapper[4912]: E1208 21:22:33.491628 4912 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" volumeName="registry-storage" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.504612 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.742861 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.745011 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.745691 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.745935 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.746263 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.751219 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.751954 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.752542 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.753074 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896264 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896371 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock\") pod \"16751926-fba8-4f6d-9e55-09ed0751ff06\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896439 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896478 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access\") pod \"16751926-fba8-4f6d-9e55-09ed0751ff06\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896497 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896474 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896517 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir\") pod \"16751926-fba8-4f6d-9e55-09ed0751ff06\" (UID: \"16751926-fba8-4f6d-9e55-09ed0751ff06\") " Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896555 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "16751926-fba8-4f6d-9e55-09ed0751ff06" (UID: "16751926-fba8-4f6d-9e55-09ed0751ff06"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896558 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock" (OuterVolumeSpecName: "var-lock") pod "16751926-fba8-4f6d-9e55-09ed0751ff06" (UID: "16751926-fba8-4f6d-9e55-09ed0751ff06"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896582 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.896618 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.897312 4912 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.897332 4912 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.897344 4912 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16751926-fba8-4f6d-9e55-09ed0751ff06-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.897358 4912 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.897370 4912 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.903766 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "16751926-fba8-4f6d-9e55-09ed0751ff06" (UID: "16751926-fba8-4f6d-9e55-09ed0751ff06"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:22:33 crc kubenswrapper[4912]: I1208 21:22:33.998873 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16751926-fba8-4f6d-9e55-09ed0751ff06-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.447571 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.519807 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.520923 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8" exitCode=0 Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.521087 4912 scope.go:117] "RemoveContainer" containerID="df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.521104 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.521921 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.522402 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.522722 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.524859 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.525134 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.525504 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.526199 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16751926-fba8-4f6d-9e55-09ed0751ff06","Type":"ContainerDied","Data":"c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8"} Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.526243 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4af4f45151e31c39ab327c02ee007d1ebde7ff081c200fcaf1cbe1baa0169b8" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.526328 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.532538 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.532989 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.533487 4912 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.540146 4912 scope.go:117] "RemoveContainer" containerID="d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.556825 4912 scope.go:117] "RemoveContainer" containerID="a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.576092 4912 scope.go:117] "RemoveContainer" containerID="9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.595271 4912 scope.go:117] "RemoveContainer" containerID="eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.613060 4912 scope.go:117] "RemoveContainer" containerID="ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.641789 4912 scope.go:117] "RemoveContainer" containerID="df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.643007 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\": container with ID starting with df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b not found: ID does not exist" containerID="df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.643074 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b"} err="failed to get container status \"df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\": rpc error: code = NotFound desc = could not find container \"df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b\": container with ID starting with df57a8d8164890de4d6c6338e2062614806f4f46868b596c29a64087aa7b770b not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.643104 4912 scope.go:117] "RemoveContainer" 
containerID="d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.644532 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\": container with ID starting with d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2 not found: ID does not exist" containerID="d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.644560 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2"} err="failed to get container status \"d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\": rpc error: code = NotFound desc = could not find container \"d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2\": container with ID starting with d4a6a5bce42bd3bd1f51c141e9ba14aba0def513024d8536b0a1cf2cb453eef2 not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.644600 4912 scope.go:117] "RemoveContainer" containerID="a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.644943 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\": container with ID starting with a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47 not found: ID does not exist" containerID="a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.644963 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47"} err="failed to get container status \"a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\": rpc error: code = NotFound desc = could not find container \"a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47\": container with ID starting with a37e8ef7116ab154fd669e0f84be5eb91b4fdac3e7e22102c72b16876b06ba47 not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.644977 4912 scope.go:117] "RemoveContainer" containerID="9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.645295 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\": container with ID starting with 9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e not found: ID does not exist" containerID="9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.645318 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e"} err="failed to get container status \"9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\": rpc error: code = NotFound desc = could not find container \"9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e\": container with ID starting with 
9d86b4ac1b3fa547c70c30f9f6bfd15a8278b2a12c8ef307fa20825d5a1c887e not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.645337 4912 scope.go:117] "RemoveContainer" containerID="eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.645578 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\": container with ID starting with eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8 not found: ID does not exist" containerID="eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.645628 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8"} err="failed to get container status \"eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\": rpc error: code = NotFound desc = could not find container \"eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8\": container with ID starting with eeccce26ca1fe4a067b6cf12e5e921dd4e4014386813faf806a94910443bb7d8 not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.645649 4912 scope.go:117] "RemoveContainer" containerID="ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.646011 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\": container with ID starting with ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087 not found: ID does not exist" containerID="ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087" Dec 08 21:22:34 crc kubenswrapper[4912]: I1208 21:22:34.646069 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087"} err="failed to get container status \"ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\": rpc error: code = NotFound desc = could not find container \"ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087\": container with ID starting with ba471df15f19f58f7ab52171a0f1e420fdf7d10cb809bbb471874eb1463f8087 not found: ID does not exist" Dec 08 21:22:34 crc kubenswrapper[4912]: E1208 21:22:34.923536 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="3.2s" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.331516 4912 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:36 crc kubenswrapper[4912]: I1208 21:22:36.332230 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.364864 4912 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187f5a621b4ac1e5 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:22:36.363702757 +0000 UTC m=+238.226704840,LastTimestamp:2025-12-08 21:22:36.363702757 +0000 UTC m=+238.226704840,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:22:36 crc kubenswrapper[4912]: I1208 21:22:36.550554 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7fd07760e88be2967bd07e0b67d920c5ae0f62174f630b31576aec93cc8ac12c"} Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.859674 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:22:36Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:22:36Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:22:36Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T21:22:36Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.859913 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.860138 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.860282 
4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.860482 4912 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:36 crc kubenswrapper[4912]: E1208 21:22:36.860507 4912 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 21:22:37 crc kubenswrapper[4912]: I1208 21:22:37.557859 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b408a16c95431b2765a9cfb669e18690d3813f816259682d2dc71b377ff21e90"} Dec 08 21:22:37 crc kubenswrapper[4912]: E1208 21:22:37.559009 4912 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:37 crc kubenswrapper[4912]: I1208 21:22:37.559629 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:37 crc kubenswrapper[4912]: I1208 21:22:37.560069 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:38 crc kubenswrapper[4912]: E1208 21:22:38.126091 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="6.4s" Dec 08 21:22:38 crc kubenswrapper[4912]: I1208 21:22:38.430300 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:38 crc kubenswrapper[4912]: I1208 21:22:38.430885 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:38 crc kubenswrapper[4912]: E1208 21:22:38.564726 4912 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 
38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.600348 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.601155 4912 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6" exitCode=1 Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.601193 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6"} Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.601731 4912 scope.go:117] "RemoveContainer" containerID="2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6" Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.602967 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.603720 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:43 crc kubenswrapper[4912]: I1208 21:22:43.604492 4912 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.427321 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.428795 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.429315 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.429857 4912 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.449057 4912 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.449108 4912 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:44 crc kubenswrapper[4912]: E1208 21:22:44.449859 4912 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.450850 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:44 crc kubenswrapper[4912]: E1208 21:22:44.527880 4912 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="7s" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.612251 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dfdc748e01d036db7e5a14f1201795dcde2ded65362f5c1a21ad551920a7c427"} Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.618027 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.618088 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"875b975640e8309ceb7fbd66cd67bd086352f87ef95b349875b9c64323e0f90d"} Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.619056 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.619655 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:44 crc kubenswrapper[4912]: I1208 21:22:44.620564 4912 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.628772 4912 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="acabc3fef0f5ad6812ff08561f33c940279fb944a6bf2b56ea8c41099b956619" exitCode=0 Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.628940 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"acabc3fef0f5ad6812ff08561f33c940279fb944a6bf2b56ea8c41099b956619"} Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.629410 4912 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.629444 4912 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:45 crc kubenswrapper[4912]: 
E1208 21:22:45.630110 4912 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.630147 4912 status_manager.go:851] "Failed to get status for pod" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.630566 4912 status_manager.go:851] "Failed to get status for pod" podUID="1b81a2f9-fb3f-4e0e-bbc3-2f58392e2903" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-78788b65b9-6m54j\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:45 crc kubenswrapper[4912]: I1208 21:22:45.630937 4912 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Dec 08 21:22:45 crc kubenswrapper[4912]: E1208 21:22:45.856438 4912 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187f5a621b4ac1e5 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 21:22:36.363702757 +0000 UTC m=+238.226704840,LastTimestamp:2025-12-08 21:22:36.363702757 +0000 UTC m=+238.226704840,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 21:22:46 crc kubenswrapper[4912]: I1208 21:22:46.641509 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d2cb768ca08aedabde593221a6ee1cd2f7fe53d7c4e9e4bbf4ced7e2e14fa886"} Dec 08 21:22:46 crc kubenswrapper[4912]: I1208 21:22:46.642170 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b2965d7cfba0263db1a29e6f62015f6ba71ef8930ab491a9434b1abf75e71978"} Dec 08 21:22:46 crc kubenswrapper[4912]: I1208 21:22:46.642189 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6f22fb63ae66f4fb42c1aadc91ff08ce5499673de0549dff9f9f069a312d82d6"} Dec 08 21:22:46 crc kubenswrapper[4912]: I1208 21:22:46.642200 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3a9de8ba7e86e642955dd0ce08f6ffa8a97ab7fde7b236d798fd550f2484c877"} Dec 08 21:22:47 crc kubenswrapper[4912]: I1208 21:22:47.657795 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"afdd888cd09e11e20e23b4f95174d8724fd3bff6523de75b0b3a99a8f2aa99f1"} Dec 08 21:22:47 crc kubenswrapper[4912]: I1208 21:22:47.658130 4912 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:47 crc kubenswrapper[4912]: I1208 21:22:47.658146 4912 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:47 crc kubenswrapper[4912]: I1208 21:22:47.658424 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:48 crc kubenswrapper[4912]: I1208 21:22:48.223279 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:22:49 crc kubenswrapper[4912]: I1208 21:22:49.451195 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:49 crc kubenswrapper[4912]: I1208 21:22:49.451885 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:49 crc kubenswrapper[4912]: I1208 21:22:49.458752 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:50 crc kubenswrapper[4912]: I1208 21:22:50.189071 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:22:50 crc kubenswrapper[4912]: I1208 21:22:50.189644 4912 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 21:22:50 crc kubenswrapper[4912]: I1208 21:22:50.189694 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 21:22:52 crc kubenswrapper[4912]: I1208 21:22:52.678720 4912 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:22:52 crc kubenswrapper[4912]: I1208 21:22:52.804765 4912 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" 
podUID="dc06145c-e1b6-484f-894c-db3bd1bfe915" Dec 08 21:22:53 crc kubenswrapper[4912]: I1208 21:22:53.696653 4912 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:53 crc kubenswrapper[4912]: I1208 21:22:53.696709 4912 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c864447a-ffce-41e4-8c64-1d56bb049b43" Dec 08 21:22:53 crc kubenswrapper[4912]: I1208 21:22:53.701320 4912 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="dc06145c-e1b6-484f-894c-db3bd1bfe915" Dec 08 21:23:00 crc kubenswrapper[4912]: I1208 21:23:00.190475 4912 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 21:23:00 crc kubenswrapper[4912]: I1208 21:23:00.191379 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 21:23:01 crc kubenswrapper[4912]: I1208 21:23:01.689310 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.067571 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.113250 4912 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.113538 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-78788b65b9-6m54j" podStartSLOduration=67.113517414 podStartE2EDuration="1m7.113517414s" podCreationTimestamp="2025-12-08 21:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:22:52.732497717 +0000 UTC m=+254.595499800" watchObservedRunningTime="2025-12-08 21:23:03.113517414 +0000 UTC m=+264.976519497" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.117869 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.117944 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.123925 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.124011 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.147191 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podStartSLOduration=11.147156382 podStartE2EDuration="11.147156382s" podCreationTimestamp="2025-12-08 21:22:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:23:03.139433188 +0000 UTC m=+265.002435351" watchObservedRunningTime="2025-12-08 21:23:03.147156382 +0000 UTC m=+265.010158505" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.506882 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.678651 4912 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.679262 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://b408a16c95431b2765a9cfb669e18690d3813f816259682d2dc71b377ff21e90" gracePeriod=5 Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.742547 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.742992 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.896797 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.907693 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.950449 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 21:23:03 crc kubenswrapper[4912]: I1208 21:23:03.981905 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.078159 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.323992 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.456379 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.527442 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.531401 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.577965 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.589648 4912 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.614699 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.735512 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.825852 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 21:23:04 crc kubenswrapper[4912]: I1208 21:23:04.858322 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.001502 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.087700 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.189016 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.283631 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.375657 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.503967 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.590267 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.661701 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.690162 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.693606 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.693778 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.698384 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.877005 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.908641 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.927628 4912 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 21:23:05 crc kubenswrapper[4912]: I1208 21:23:05.957121 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.091560 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.164375 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.238665 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.496107 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.603206 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.614955 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.648546 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.656537 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.666640 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.670456 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.818355 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.863885 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.904760 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.950471 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.958166 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.959971 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 21:23:06 crc kubenswrapper[4912]: I1208 21:23:06.975321 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.060006 4912 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.085545 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.112326 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.143906 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.144172 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.265301 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.296716 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.323651 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.344150 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.399351 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.441288 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.591683 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.625101 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.662063 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.689571 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.743959 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.750021 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.758920 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.775758 4912 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 08 21:23:07 crc kubenswrapper[4912]: 
I1208 21:23:07.822155 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.931920 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 21:23:07 crc kubenswrapper[4912]: I1208 21:23:07.980451 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.053241 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.110457 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.433693 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.478378 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.616216 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.669378 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.694087 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.728705 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.786872 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.789601 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.789651 4912 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="b408a16c95431b2765a9cfb669e18690d3813f816259682d2dc71b377ff21e90" exitCode=137 Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.905769 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.943377 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.957293 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 21:23:08 crc kubenswrapper[4912]: I1208 21:23:08.998259 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.027814 4912 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.034562 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.094658 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.194996 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.200794 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.211304 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.275853 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.276240 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.314716 4912 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401652 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401720 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401797 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401833 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401870 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401951 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") 
pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.402723 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.401923 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.405628 4912 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.405652 4912 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.405661 4912 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.411809 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.414517 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.416074 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.501686 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.506071 4912 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.506096 4912 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.589456 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.623979 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.660723 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.796924 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.797005 4912 scope.go:117] "RemoveContainer" containerID="b408a16c95431b2765a9cfb669e18690d3813f816259682d2dc71b377ff21e90" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.797127 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.802548 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.855621 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.916184 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.942920 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 21:23:09 crc kubenswrapper[4912]: I1208 21:23:09.996894 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.185625 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.190403 4912 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.190485 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.190576 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.192048 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"875b975640e8309ceb7fbd66cd67bd086352f87ef95b349875b9c64323e0f90d"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.192333 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://875b975640e8309ceb7fbd66cd67bd086352f87ef95b349875b9c64323e0f90d" gracePeriod=30 Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.236253 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.360011 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.438790 4912 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.443308 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.477064 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.546845 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.564334 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.590588 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.602440 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.688279 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.737450 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.763978 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.813290 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.824943 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.928194 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.934054 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 21:23:10 crc kubenswrapper[4912]: I1208 21:23:10.970868 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.040096 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.118216 4912 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.178751 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.187115 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 21:23:11 crc 
kubenswrapper[4912]: I1208 21:23:11.209604 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.221667 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.252477 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.364767 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.366816 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.392609 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.451727 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.463486 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.609212 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.688738 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.720703 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.757149 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 21:23:11 crc kubenswrapper[4912]: I1208 21:23:11.777693 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.054309 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.067316 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.095315 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.118689 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.190741 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.199740 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 21:23:12 crc 
kubenswrapper[4912]: I1208 21:23:12.224806 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.248547 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.364570 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.454965 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.459882 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.513326 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.593579 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.753992 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.782388 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.852986 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.882799 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.907965 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 21:23:12 crc kubenswrapper[4912]: I1208 21:23:12.969599 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.014599 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.072914 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.099999 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.222147 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.268013 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.380203 4912 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.391988 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.398650 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.423943 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.531906 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.541141 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.555486 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.585445 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.586067 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.645108 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.668208 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.668903 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.673302 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.686321 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.697237 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.721772 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.770160 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.783433 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.845096 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.847502 4912 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.883420 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.911775 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 21:23:13 crc kubenswrapper[4912]: I1208 21:23:13.937752 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.019964 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.080835 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.182561 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.267438 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.282869 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.337345 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.452471 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.481716 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.635324 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.642758 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.655160 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.656601 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.760158 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.839713 4912 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.845346 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 21:23:14 crc kubenswrapper[4912]: I1208 21:23:14.973875 4912 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.038116 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.066221 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.087498 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.120539 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.187227 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.216709 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.260314 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.267912 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.304684 4912 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.361981 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.522996 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.535943 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.574340 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.575202 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.579403 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.643781 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.699181 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.714765 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.720964 4912 reflector.go:368] Caches populated for *v1.ConfigMap from 
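[editor's note] The long run of "Caches populated" lines records client-go reflectors completing their initial LIST for each Secret/ConfigMap the node's pods reference. A minimal client-go sketch of the same mechanism follows, assuming a reachable kubeconfig; this is not the kubelet's own wiring, just the informer/reflector pattern the log lines come from.

package main

import (
	"context"
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes a kubeconfig at the default location (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactory(client, 10*time.Minute)
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	factory.Start(ctx.Done())

	// The moment HasSynced flips true corresponds to a
	// "Caches populated for *v1.ConfigMap ..." line in the kubelet log:
	// the reflector's initial LIST is done and the local store is current.
	if !cache.WaitForCacheSync(ctx.Done(), cmInformer.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("ConfigMap cache populated")
}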
object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.749953 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.930897 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.976276 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:23:15 crc kubenswrapper[4912]: I1208 21:23:15.990965 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.014221 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.165004 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.210719 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.366758 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.553000 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.622323 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.623860 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.709264 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 21:23:16 crc kubenswrapper[4912]: I1208 21:23:16.881719 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.025750 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.401783 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.464852 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.706132 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.727265 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.761620 4912 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.812137 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 21:23:17 crc kubenswrapper[4912]: I1208 21:23:17.871205 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.062310 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.290233 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.290294 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.352801 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.460655 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.470801 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 21:23:18 crc kubenswrapper[4912]: I1208 21:23:18.859071 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 08 21:23:19 crc kubenswrapper[4912]: I1208 21:23:19.415331 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 21:23:19 crc kubenswrapper[4912]: I1208 21:23:19.996601 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 21:23:36 crc kubenswrapper[4912]: I1208 21:23:36.975088 4912 generic.go:334] "Generic (PLEG): container finished" podID="9f40e8be-f418-4be6-912f-d732718730e7" containerID="71906c229188b9c1373923699b33a89a038f1bf2da6626c12ba743ea673cba6a" exitCode=0 Dec 08 21:23:36 crc kubenswrapper[4912]: I1208 21:23:36.975170 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerDied","Data":"71906c229188b9c1373923699b33a89a038f1bf2da6626c12ba743ea673cba6a"} Dec 08 21:23:36 crc kubenswrapper[4912]: I1208 21:23:36.975997 4912 scope.go:117] "RemoveContainer" containerID="71906c229188b9c1373923699b33a89a038f1bf2da6626c12ba743ea673cba6a" Dec 08 21:23:37 crc kubenswrapper[4912]: I1208 21:23:37.993725 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerStarted","Data":"315c41b17ab66359c798610e4cddd1f7dd93b8b83817b2824b14888807a9167a"} Dec 08 21:23:37 crc kubenswrapper[4912]: I1208 21:23:37.994859 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:23:37 crc kubenswrapper[4912]: I1208 21:23:37.997030 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:23:41 crc kubenswrapper[4912]: I1208 21:23:41.012086 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 08 21:23:41 crc kubenswrapper[4912]: I1208 21:23:41.018066 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 21:23:41 crc kubenswrapper[4912]: I1208 21:23:41.018146 4912 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="875b975640e8309ceb7fbd66cd67bd086352f87ef95b349875b9c64323e0f90d" exitCode=137 Dec 08 21:23:41 crc kubenswrapper[4912]: I1208 21:23:41.018197 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"875b975640e8309ceb7fbd66cd67bd086352f87ef95b349875b9c64323e0f90d"} Dec 08 21:23:41 crc kubenswrapper[4912]: I1208 21:23:41.018257 4912 scope.go:117] "RemoveContainer" containerID="2031d115a82c6cafcc508f63c30aa0514902d60a6c9f96459918df16b176a6f6" Dec 08 21:23:42 crc kubenswrapper[4912]: I1208 21:23:42.026762 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 08 21:23:42 crc kubenswrapper[4912]: I1208 21:23:42.028453 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2be00d42bf9fd3c658d7280750790362fd004649ec2333159aaa454c1708689f"} Dec 08 21:23:48 crc kubenswrapper[4912]: I1208 21:23:48.222972 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:23:50 crc kubenswrapper[4912]: I1208 21:23:50.189620 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:23:50 crc kubenswrapper[4912]: I1208 21:23:50.194858 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:23:51 crc kubenswrapper[4912]: I1208 21:23:51.087610 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.481749 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.482673 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerName="controller-manager" containerID="cri-o://b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42" gracePeriod=30 Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.485235 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:23:58 crc 
kubenswrapper[4912]: I1208 21:23:58.485579 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" podUID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" containerName="route-controller-manager" containerID="cri-o://244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae" gracePeriod=30 Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.905645 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.911869 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918825 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca\") pod \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918871 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbrc5\" (UniqueName: \"kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5\") pod \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918902 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert\") pod \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918935 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w259p\" (UniqueName: \"kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p\") pod \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918950 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert\") pod \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.918968 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca\") pod \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.919008 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config\") pod \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.919028 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles\") 
pod \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\" (UID: \"c5248f2d-ec3c-436a-9c01-5bef4382ca4f\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.920355 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config\") pod \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\" (UID: \"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b\") " Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.920518 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca" (OuterVolumeSpecName: "client-ca") pod "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" (UID: "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.921936 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca" (OuterVolumeSpecName: "client-ca") pod "c5248f2d-ec3c-436a-9c01-5bef4382ca4f" (UID: "c5248f2d-ec3c-436a-9c01-5bef4382ca4f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.922026 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config" (OuterVolumeSpecName: "config") pod "c5248f2d-ec3c-436a-9c01-5bef4382ca4f" (UID: "c5248f2d-ec3c-436a-9c01-5bef4382ca4f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.922135 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config" (OuterVolumeSpecName: "config") pod "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" (UID: "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.922465 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c5248f2d-ec3c-436a-9c01-5bef4382ca4f" (UID: "c5248f2d-ec3c-436a-9c01-5bef4382ca4f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.977717 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" (UID: "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.978268 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p" (OuterVolumeSpecName: "kube-api-access-w259p") pod "c5248f2d-ec3c-436a-9c01-5bef4382ca4f" (UID: "c5248f2d-ec3c-436a-9c01-5bef4382ca4f"). InnerVolumeSpecName "kube-api-access-w259p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.981087 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c5248f2d-ec3c-436a-9c01-5bef4382ca4f" (UID: "c5248f2d-ec3c-436a-9c01-5bef4382ca4f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:23:58 crc kubenswrapper[4912]: I1208 21:23:58.982641 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5" (OuterVolumeSpecName: "kube-api-access-mbrc5") pod "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" (UID: "e6c8af1a-fcac-4faa-86b0-780ea6dacb9b"). InnerVolumeSpecName "kube-api-access-mbrc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022692 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w259p\" (UniqueName: \"kubernetes.io/projected/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-kube-api-access-w259p\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022744 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022758 4912 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022769 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022778 4912 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5248f2d-ec3c-436a-9c01-5bef4382ca4f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022787 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022796 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbrc5\" (UniqueName: \"kubernetes.io/projected/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-kube-api-access-mbrc5\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022805 4912 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.022813 4912 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.148928 4912 generic.go:334] "Generic (PLEG): container finished" podID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" 
containerID="244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae" exitCode=0 Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.148986 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.149011 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" event={"ID":"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b","Type":"ContainerDied","Data":"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae"} Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.149173 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7" event={"ID":"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b","Type":"ContainerDied","Data":"2c93304a07b437ad4518ff159b33300b36a27cc3727bc3e403d78a76e7950668"} Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.149200 4912 scope.go:117] "RemoveContainer" containerID="244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.150605 4912 generic.go:334] "Generic (PLEG): container finished" podID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerID="b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42" exitCode=0 Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.150632 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" event={"ID":"c5248f2d-ec3c-436a-9c01-5bef4382ca4f","Type":"ContainerDied","Data":"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42"} Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.150649 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" event={"ID":"c5248f2d-ec3c-436a-9c01-5bef4382ca4f","Type":"ContainerDied","Data":"df0974cbb551614afc0ec58452916b4fb801900ad4f92898bed555db82e1404d"} Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.150708 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sm5lg" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.167906 4912 scope.go:117] "RemoveContainer" containerID="244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae" Dec 08 21:23:59 crc kubenswrapper[4912]: E1208 21:23:59.168322 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae\": container with ID starting with 244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae not found: ID does not exist" containerID="244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.168360 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae"} err="failed to get container status \"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae\": rpc error: code = NotFound desc = could not find container \"244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae\": container with ID starting with 244903e54175dc9851f35c4df1405b3fce48b9b94cdf2f334f06c74c9ca0ccae not found: ID does not exist" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.168392 4912 scope.go:117] "RemoveContainer" containerID="b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.180008 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.184963 4912 scope.go:117] "RemoveContainer" containerID="b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.187113 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7llx7"] Dec 08 21:23:59 crc kubenswrapper[4912]: E1208 21:23:59.187605 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42\": container with ID starting with b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42 not found: ID does not exist" containerID="b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.187644 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42"} err="failed to get container status \"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42\": rpc error: code = NotFound desc = could not find container \"b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42\": container with ID starting with b965e85f4f3630dabcf7ecdc0285d2dd32d54a4a11240627333e15dc06ab5d42 not found: ID does not exist" Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.199728 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:23:59 crc kubenswrapper[4912]: I1208 21:23:59.203386 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sm5lg"] Dec 08 21:24:00 crc 
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.434940 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" path="/var/lib/kubelet/pods/c5248f2d-ec3c-436a-9c01-5bef4382ca4f/volumes"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.435876 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" path="/var/lib/kubelet/pods/e6c8af1a-fcac-4faa-86b0-780ea6dacb9b/volumes"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436374 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"]
Dec 08 21:24:00 crc kubenswrapper[4912]: E1208 21:24:00.436612 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerName="controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436629 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerName="controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: E1208 21:24:00.436648 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436657 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 08 21:24:00 crc kubenswrapper[4912]: E1208 21:24:00.436677 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" containerName="route-controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436686 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" containerName="route-controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: E1208 21:24:00.436696 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" containerName="installer"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436704 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" containerName="installer"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436830 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5248f2d-ec3c-436a-9c01-5bef4382ca4f" containerName="controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436847 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6c8af1a-fcac-4faa-86b0-780ea6dacb9b" containerName="route-controller-manager"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436861 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.436875 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="16751926-fba8-4f6d-9e55-09ed0751ff06" containerName="installer"
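[editor's note] The RemoveStaleState lines show the CPU and memory managers dropping allocation state for pod UIDs that no longer exist on the node, right before the replacement pods are admitted. A conceptual sketch of that sweep follows (not kubelet's actual code; the UIDs and container names are taken from the log):

package main

import "fmt"

func main() {
	// Pods currently known to the node (the newly added replacement pod).
	active := map[string]bool{"958a45cd-9b0e-4872-b2fa-4212eefbd471": true}
	// Leftover per-container resource assignments from deleted pods.
	assignments := map[string]string{
		"c5248f2d-ec3c-436a-9c01-5bef4382ca4f": "controller-manager",
		"e6c8af1a-fcac-4faa-86b0-780ea6dacb9b": "route-controller-manager",
		"f85e55b1a89d02b0cb034b1ea31ed45a":     "startup-monitor",
		"16751926-fba8-4f6d-9e55-09ed0751ff06": "installer",
	}
	// Drop any assignment whose pod UID is no longer active, as
	// cpu_manager/memory_manager do in the entries above.
	for uid, container := range assignments {
		if !active[uid] {
			fmt.Printf("RemoveStaleState: removing container %s/%s\n", uid, container)
			delete(assignments, uid)
		}
	}
}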
Need to start a new one" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.437491 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"] Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.438245 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441090 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441154 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441204 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441321 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cctn2\" (UniqueName: \"kubernetes.io/projected/48f400b8-fff6-40d4-aedc-e58af846c1dc-kube-api-access-cctn2\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441387 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-client-ca\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441522 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441639 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.449824 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"] Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.444868 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.444871 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.444934 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.444933 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.444999 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 
21:24:00.445075 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.441615 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-client-ca\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.450296 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-config\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.450379 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958a45cd-9b0e-4872-b2fa-4212eefbd471-serving-cert\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.450397 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-config\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.450423 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f400b8-fff6-40d4-aedc-e58af846c1dc-serving-cert\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.454179 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.455774 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.488383 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"] Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552192 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958a45cd-9b0e-4872-b2fa-4212eefbd471-serving-cert\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552260 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-config\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552296 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f400b8-fff6-40d4-aedc-e58af846c1dc-serving-cert\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552386 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cctn2\" (UniqueName: \"kubernetes.io/projected/48f400b8-fff6-40d4-aedc-e58af846c1dc-kube-api-access-cctn2\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552428 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-client-ca\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552479 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-client-ca\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552514 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-proxy-ca-bundles\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552546 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-config\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.552588 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjhpw\" (UniqueName: \"kubernetes.io/projected/958a45cd-9b0e-4872-b2fa-4212eefbd471-kube-api-access-gjhpw\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.554333 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-client-ca\") pod 
\"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.555721 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-config\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.556846 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-client-ca\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.558223 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48f400b8-fff6-40d4-aedc-e58af846c1dc-config\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.561905 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958a45cd-9b0e-4872-b2fa-4212eefbd471-serving-cert\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.567791 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f400b8-fff6-40d4-aedc-e58af846c1dc-serving-cert\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.575000 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cctn2\" (UniqueName: \"kubernetes.io/projected/48f400b8-fff6-40d4-aedc-e58af846c1dc-kube-api-access-cctn2\") pod \"route-controller-manager-6dbcf7d5bc-c72t4\" (UID: \"48f400b8-fff6-40d4-aedc-e58af846c1dc\") " pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.653845 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjhpw\" (UniqueName: \"kubernetes.io/projected/958a45cd-9b0e-4872-b2fa-4212eefbd471-kube-api-access-gjhpw\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.654403 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/958a45cd-9b0e-4872-b2fa-4212eefbd471-proxy-ca-bundles\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" Dec 08 21:24:00 
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.670902 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjhpw\" (UniqueName: \"kubernetes.io/projected/958a45cd-9b0e-4872-b2fa-4212eefbd471-kube-api-access-gjhpw\") pod \"controller-manager-6d8dc5c4db-lvkbp\" (UID: \"958a45cd-9b0e-4872-b2fa-4212eefbd471\") " pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.757678 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"
Dec 08 21:24:00 crc kubenswrapper[4912]: I1208 21:24:00.769847 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"
Dec 08 21:24:01 crc kubenswrapper[4912]: I1208 21:24:01.049462 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"]
Dec 08 21:24:01 crc kubenswrapper[4912]: I1208 21:24:01.167745 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" event={"ID":"48f400b8-fff6-40d4-aedc-e58af846c1dc","Type":"ContainerStarted","Data":"ab46c6cfb448b68f2bff799f0cd58fb4fd1c8f1c1fcaef8814a43c28a0ccb4f7"}
Dec 08 21:24:01 crc kubenswrapper[4912]: I1208 21:24:01.253173 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"]
Dec 08 21:24:01 crc kubenswrapper[4912]: W1208 21:24:01.258426 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod958a45cd_9b0e_4872_b2fa_4212eefbd471.slice/crio-54eb19dc6a000816c96b62771a50932c2aca6a5be5536b403f039402f2c25d23 WatchSource:0}: Error finding container 54eb19dc6a000816c96b62771a50932c2aca6a5be5536b403f039402f2c25d23: Status 404 returned error can't find the container with id 54eb19dc6a000816c96b62771a50932c2aca6a5be5536b403f039402f2c25d23
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.324817 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" event={"ID":"958a45cd-9b0e-4872-b2fa-4212eefbd471","Type":"ContainerStarted","Data":"d317c1db31bda84ab775c72268d1e0e59a496b9345f04cc0066cd171c5f77a7e"}
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.325765 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" event={"ID":"958a45cd-9b0e-4872-b2fa-4212eefbd471","Type":"ContainerStarted","Data":"54eb19dc6a000816c96b62771a50932c2aca6a5be5536b403f039402f2c25d23"}
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.326169 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.332215 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp"
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.333164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" event={"ID":"48f400b8-fff6-40d4-aedc-e58af846c1dc","Type":"ContainerStarted","Data":"428eace9087ece469a3d6a33671bd1adfd2ec9607058b6a1bbec2f38ec2aeb78"}
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.350284 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d8dc5c4db-lvkbp" podStartSLOduration=4.3502575310000005 podStartE2EDuration="4.350257531s" podCreationTimestamp="2025-12-08 21:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:24:02.349441923 +0000 UTC m=+324.212444006" watchObservedRunningTime="2025-12-08 21:24:02.350257531 +0000 UTC m=+324.213259614"
Dec 08 21:24:02 crc kubenswrapper[4912]: I1208 21:24:02.444323 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4" podStartSLOduration=4.444296361 podStartE2EDuration="4.444296361s" podCreationTimestamp="2025-12-08 21:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:24:02.442792267 +0000 UTC m=+324.305794360" watchObservedRunningTime="2025-12-08 21:24:02.444296361 +0000 UTC m=+324.307298444"
Dec 08 21:24:03 crc kubenswrapper[4912]: I1208 21:24:03.341355 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"
Dec 08 21:24:03 crc kubenswrapper[4912]: I1208 21:24:03.350067 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6dbcf7d5bc-c72t4"
Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.034692 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cl4bh"]
Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.036384 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh"
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.056174 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cl4bh"] Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198771 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-registry-tls\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198825 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-bound-sa-token\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198857 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-registry-certificates\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198874 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7x24\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-kube-api-access-k7x24\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198896 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7db8cca8-8116-4c4e-8932-da294519513e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.198926 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7db8cca8-8116-4c4e-8932-da294519513e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.199059 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.199106 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-trusted-ca\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300638 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-trusted-ca\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300787 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-registry-tls\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300819 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-bound-sa-token\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300853 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-registry-certificates\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300883 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7x24\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-kube-api-access-k7x24\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300917 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7db8cca8-8116-4c4e-8932-da294519513e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.300951 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7db8cca8-8116-4c4e-8932-da294519513e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.301837 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7db8cca8-8116-4c4e-8932-da294519513e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" 
Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.301904 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-trusted-ca\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.302723 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7db8cca8-8116-4c4e-8932-da294519513e-registry-certificates\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.307478 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7db8cca8-8116-4c4e-8932-da294519513e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.307825 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-registry-tls\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.318049 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7x24\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-kube-api-access-k7x24\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.329863 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7db8cca8-8116-4c4e-8932-da294519513e-bound-sa-token\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.400239 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-cl4bh\" (UID: \"7db8cca8-8116-4c4e-8932-da294519513e\") " pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:20 crc kubenswrapper[4912]: I1208 21:24:20.654185 4912 util.go:30] "No sandbox for pod can be found. 
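[editor's note] The image-registry pod above mounts seven volumes, including a CSI-provisioned PVC (kubevirt.io.hostpath-provisioner), and every one reaches "MountVolume.SetUp succeeded". A sketch, under the same log-format assumptions as the earlier snippet, that flags any volume stuck before the final phase; the three phase strings below are copied from the records in this file:

package main

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"
)

// volume name and pod UID from the escaped payload, e.g.
// for volume \"registry-tls\" (...) pod \"...\" (UID: \"7db8cca8-...\")
var vol = regexp.MustCompile(`for volume \\"([^\\]+)\\".*\(UID: \\"([^\\]+)\\"\)`)

// phase markers in the order the reconciler emits them in this log
var phases = []string{
    "operationExecutor.VerifyControllerAttachedVolume started",
    "operationExecutor.MountVolume started",
    "MountVolume.SetUp succeeded",
}

func main() {
    // seen[podUID][volume] = highest phase index observed
    seen := map[string]map[string]int{}
    sc := bufio.NewScanner(os.Stdin)
    sc.Buffer(make([]byte, 1<<20), 1<<20)
    for sc.Scan() {
        line := sc.Text()
        m := vol.FindStringSubmatch(line)
        if m == nil {
            continue
        }
        for i, p := range phases {
            if !strings.Contains(line, p) {
                continue
            }
            if seen[m[2]] == nil {
                seen[m[2]] = map[string]int{}
            }
            if i >= seen[m[2]][m[1]] {
                seen[m[2]][m[1]] = i
            }
        }
    }
    for uid, vols := range seen {
        for v, phase := range vols {
            if phase < len(phases)-1 {
                fmt.Printf("pod %s: volume %q stalled after %q\n", uid, v, phases[phase])
            }
        }
    }
}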
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:21 crc kubenswrapper[4912]: I1208 21:24:21.142586 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cl4bh"] Dec 08 21:24:21 crc kubenswrapper[4912]: W1208 21:24:21.143581 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7db8cca8_8116_4c4e_8932_da294519513e.slice/crio-cd6bf3ae08abacd7f5b2efa38e5e761c536a2bafcef4ed5a40c52ef2a1957b97 WatchSource:0}: Error finding container cd6bf3ae08abacd7f5b2efa38e5e761c536a2bafcef4ed5a40c52ef2a1957b97: Status 404 returned error can't find the container with id cd6bf3ae08abacd7f5b2efa38e5e761c536a2bafcef4ed5a40c52ef2a1957b97 Dec 08 21:24:21 crc kubenswrapper[4912]: I1208 21:24:21.443472 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" event={"ID":"7db8cca8-8116-4c4e-8932-da294519513e","Type":"ContainerStarted","Data":"eab32861442a34c38c2120c0ffe482b943301573ad63bcceda089f28bcc593e0"} Dec 08 21:24:21 crc kubenswrapper[4912]: I1208 21:24:21.443550 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" event={"ID":"7db8cca8-8116-4c4e-8932-da294519513e","Type":"ContainerStarted","Data":"cd6bf3ae08abacd7f5b2efa38e5e761c536a2bafcef4ed5a40c52ef2a1957b97"} Dec 08 21:24:21 crc kubenswrapper[4912]: I1208 21:24:21.443797 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:21 crc kubenswrapper[4912]: I1208 21:24:21.473624 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" podStartSLOduration=1.473598472 podStartE2EDuration="1.473598472s" podCreationTimestamp="2025-12-08 21:24:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:24:21.469565257 +0000 UTC m=+343.332567350" watchObservedRunningTime="2025-12-08 21:24:21.473598472 +0000 UTC m=+343.336600555" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.026291 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.027452 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2k2fc" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="registry-server" containerID="cri-o://271b5d86ee2f8cbcfd548439f502d51f93173c17221151f467ce4e2e2f775f43" gracePeriod=30 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.037889 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2x2xd"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.038548 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2x2xd" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="registry-server" containerID="cri-o://ff9b26030fbd2747a1d60dfe49ae1c286369d718dbcf641b7bbc448b20fcbab3" gracePeriod=30 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.204237 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"] Dec 08 21:24:29 crc 
kubenswrapper[4912]: I1208 21:24:29.205174 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" containerID="cri-o://315c41b17ab66359c798610e4cddd1f7dd93b8b83817b2824b14888807a9167a" gracePeriod=30 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.234538 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.234879 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8p8tl" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="registry-server" containerID="cri-o://8929b262f7c1a2da65b5091b9c1a0f61d1a26f6903d79f85c11d68579fb78210" gracePeriod=30 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.245623 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zgnwz"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.247250 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.258931 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.259399 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7nhqb" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="registry-server" containerID="cri-o://a0fadcbf89dc836c83019ef396e89af7dd2bd7912cf2a611c205b93334fc56d5" gracePeriod=30 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.266507 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zgnwz"] Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.281322 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x45cl\" (UniqueName: \"kubernetes.io/projected/fc8cf81c-b57c-4849-9e7c-10b0753855b1-kube-api-access-x45cl\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.281407 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.281453 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.383439 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.383518 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x45cl\" (UniqueName: \"kubernetes.io/projected/fc8cf81c-b57c-4849-9e7c-10b0753855b1-kube-api-access-x45cl\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.383552 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.385685 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.401375 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fc8cf81c-b57c-4849-9e7c-10b0753855b1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.402698 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x45cl\" (UniqueName: \"kubernetes.io/projected/fc8cf81c-b57c-4849-9e7c-10b0753855b1-kube-api-access-x45cl\") pod \"marketplace-operator-79b997595-zgnwz\" (UID: \"fc8cf81c-b57c-4849-9e7c-10b0753855b1\") " pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.515024 4912 generic.go:334] "Generic (PLEG): container finished" podID="9f40e8be-f418-4be6-912f-d732718730e7" containerID="315c41b17ab66359c798610e4cddd1f7dd93b8b83817b2824b14888807a9167a" exitCode=0 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.515128 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerDied","Data":"315c41b17ab66359c798610e4cddd1f7dd93b8b83817b2824b14888807a9167a"} Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.515177 4912 scope.go:117] "RemoveContainer" containerID="71906c229188b9c1373923699b33a89a038f1bf2da6626c12ba743ea673cba6a" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.518610 4912 generic.go:334] "Generic (PLEG): container finished" podID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerID="8929b262f7c1a2da65b5091b9c1a0f61d1a26f6903d79f85c11d68579fb78210" exitCode=0 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.518678 4912 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerDied","Data":"8929b262f7c1a2da65b5091b9c1a0f61d1a26f6903d79f85c11d68579fb78210"} Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.521752 4912 generic.go:334] "Generic (PLEG): container finished" podID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerID="a0fadcbf89dc836c83019ef396e89af7dd2bd7912cf2a611c205b93334fc56d5" exitCode=0 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.521859 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerDied","Data":"a0fadcbf89dc836c83019ef396e89af7dd2bd7912cf2a611c205b93334fc56d5"} Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.535682 4912 generic.go:334] "Generic (PLEG): container finished" podID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerID="271b5d86ee2f8cbcfd548439f502d51f93173c17221151f467ce4e2e2f775f43" exitCode=0 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.535785 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerDied","Data":"271b5d86ee2f8cbcfd548439f502d51f93173c17221151f467ce4e2e2f775f43"} Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.539933 4912 generic.go:334] "Generic (PLEG): container finished" podID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerID="ff9b26030fbd2747a1d60dfe49ae1c286369d718dbcf641b7bbc448b20fcbab3" exitCode=0 Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.540002 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerDied","Data":"ff9b26030fbd2747a1d60dfe49ae1c286369d718dbcf641b7bbc448b20fcbab3"} Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.621587 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.699073 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.788416 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca\") pod \"9f40e8be-f418-4be6-912f-d732718730e7\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.788558 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics\") pod \"9f40e8be-f418-4be6-912f-d732718730e7\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.788594 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx59x\" (UniqueName: \"kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x\") pod \"9f40e8be-f418-4be6-912f-d732718730e7\" (UID: \"9f40e8be-f418-4be6-912f-d732718730e7\") " Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.789464 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9f40e8be-f418-4be6-912f-d732718730e7" (UID: "9f40e8be-f418-4be6-912f-d732718730e7"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.794951 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x" (OuterVolumeSpecName: "kube-api-access-gx59x") pod "9f40e8be-f418-4be6-912f-d732718730e7" (UID: "9f40e8be-f418-4be6-912f-d732718730e7"). InnerVolumeSpecName "kube-api-access-gx59x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.795460 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9f40e8be-f418-4be6-912f-d732718730e7" (UID: "9f40e8be-f418-4be6-912f-d732718730e7"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.964495 4912 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.964520 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx59x\" (UniqueName: \"kubernetes.io/projected/9f40e8be-f418-4be6-912f-d732718730e7-kube-api-access-gx59x\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:29 crc kubenswrapper[4912]: I1208 21:24:29.964529 4912 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f40e8be-f418-4be6-912f-d732718730e7-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.227746 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zgnwz"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.387325 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.511793 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.548026 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2k2fc" event={"ID":"570435e6-b620-4b1c-8f4b-47b36f3bee5e","Type":"ContainerDied","Data":"e299148a99ae9434f3cac07d64987cd7ac866147fa1eb91a4c78128660fd2475"} Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.548112 4912 scope.go:117] "RemoveContainer" containerID="271b5d86ee2f8cbcfd548439f502d51f93173c17221151f467ce4e2e2f775f43" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.548215 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2k2fc" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.551234 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2x2xd" event={"ID":"c56535e8-663a-43a9-b596-79a4d3ac0403","Type":"ContainerDied","Data":"08ab12aad0099216238e1c7e672aa433a3e704f4a8dde1d4f4e9cfab21cf511c"} Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.551396 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2x2xd" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.553450 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" event={"ID":"9f40e8be-f418-4be6-912f-d732718730e7","Type":"ContainerDied","Data":"fcbbd3eb0440f3b9506a0de3c9a83f9a75740de8a170432f83d17b009fa0323b"} Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.553548 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw698" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.559480 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" event={"ID":"fc8cf81c-b57c-4849-9e7c-10b0753855b1","Type":"ContainerStarted","Data":"54c1d2586c402e1cb1e7791099daa7bd2da63cfb0569248055ed43aad530756b"} Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574225 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content\") pod \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574275 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content\") pod \"c56535e8-663a-43a9-b596-79a4d3ac0403\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574329 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nntdh\" (UniqueName: \"kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh\") pod \"c56535e8-663a-43a9-b596-79a4d3ac0403\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574355 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities\") pod \"c56535e8-663a-43a9-b596-79a4d3ac0403\" (UID: \"c56535e8-663a-43a9-b596-79a4d3ac0403\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574373 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm64g\" (UniqueName: \"kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g\") pod \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.574486 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities\") pod \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\" (UID: \"570435e6-b620-4b1c-8f4b-47b36f3bee5e\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.575856 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities" (OuterVolumeSpecName: "utilities") pod "c56535e8-663a-43a9-b596-79a4d3ac0403" (UID: "c56535e8-663a-43a9-b596-79a4d3ac0403"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.576896 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities" (OuterVolumeSpecName: "utilities") pod "570435e6-b620-4b1c-8f4b-47b36f3bee5e" (UID: "570435e6-b620-4b1c-8f4b-47b36f3bee5e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.579111 4912 scope.go:117] "RemoveContainer" containerID="32f8d40cf7223d47fd8efd65544ddc68d2d95ab53d18a8e7c7f70f5beb2203ae" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.579743 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.583947 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw698"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.587190 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g" (OuterVolumeSpecName: "kube-api-access-zm64g") pod "570435e6-b620-4b1c-8f4b-47b36f3bee5e" (UID: "570435e6-b620-4b1c-8f4b-47b36f3bee5e"). InnerVolumeSpecName "kube-api-access-zm64g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.591219 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh" (OuterVolumeSpecName: "kube-api-access-nntdh") pod "c56535e8-663a-43a9-b596-79a4d3ac0403" (UID: "c56535e8-663a-43a9-b596-79a4d3ac0403"). InnerVolumeSpecName "kube-api-access-nntdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.602923 4912 scope.go:117] "RemoveContainer" containerID="50e5c979bb3d7905245a929fd342651abfde3873e6b2c7d909472e56b7073fdc" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.627804 4912 scope.go:117] "RemoveContainer" containerID="ff9b26030fbd2747a1d60dfe49ae1c286369d718dbcf641b7bbc448b20fcbab3" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.636702 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.642336 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "570435e6-b620-4b1c-8f4b-47b36f3bee5e" (UID: "570435e6-b620-4b1c-8f4b-47b36f3bee5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.643743 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.648104 4912 scope.go:117] "RemoveContainer" containerID="84b066d4828075bbce39a5ac3c27cd38321a9a49c5fec894a04658847c0c4127" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.660386 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c56535e8-663a-43a9-b596-79a4d3ac0403" (UID: "c56535e8-663a-43a9-b596-79a4d3ac0403"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676558 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676614 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676632 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nntdh\" (UniqueName: \"kubernetes.io/projected/c56535e8-663a-43a9-b596-79a4d3ac0403-kube-api-access-nntdh\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676663 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c56535e8-663a-43a9-b596-79a4d3ac0403-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676681 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm64g\" (UniqueName: \"kubernetes.io/projected/570435e6-b620-4b1c-8f4b-47b36f3bee5e-kube-api-access-zm64g\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.676694 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/570435e6-b620-4b1c-8f4b-47b36f3bee5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.690132 4912 scope.go:117] "RemoveContainer" containerID="7c2048822762cebadc26ba497ad3098ba8a102490a7e9b119c0d5912857be239" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.705907 4912 scope.go:117] "RemoveContainer" containerID="315c41b17ab66359c798610e4cddd1f7dd93b8b83817b2824b14888807a9167a" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.778617 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities\") pod \"ba9bc9a7-7ac4-43eb-a545-099564781a42\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.779471 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content\") pod \"3602ee8f-3aa3-4873-a791-5e695083cd99\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.779708 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5rbg\" (UniqueName: \"kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg\") pod \"ba9bc9a7-7ac4-43eb-a545-099564781a42\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.779890 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content\") pod \"ba9bc9a7-7ac4-43eb-a545-099564781a42\" (UID: \"ba9bc9a7-7ac4-43eb-a545-099564781a42\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.780138 4912 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities\") pod \"3602ee8f-3aa3-4873-a791-5e695083cd99\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.780284 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xd4t\" (UniqueName: \"kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t\") pod \"3602ee8f-3aa3-4873-a791-5e695083cd99\" (UID: \"3602ee8f-3aa3-4873-a791-5e695083cd99\") " Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.780315 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities" (OuterVolumeSpecName: "utilities") pod "ba9bc9a7-7ac4-43eb-a545-099564781a42" (UID: "ba9bc9a7-7ac4-43eb-a545-099564781a42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.780918 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.781052 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities" (OuterVolumeSpecName: "utilities") pod "3602ee8f-3aa3-4873-a791-5e695083cd99" (UID: "3602ee8f-3aa3-4873-a791-5e695083cd99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.784350 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t" (OuterVolumeSpecName: "kube-api-access-4xd4t") pod "3602ee8f-3aa3-4873-a791-5e695083cd99" (UID: "3602ee8f-3aa3-4873-a791-5e695083cd99"). InnerVolumeSpecName "kube-api-access-4xd4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.784438 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg" (OuterVolumeSpecName: "kube-api-access-r5rbg") pod "ba9bc9a7-7ac4-43eb-a545-099564781a42" (UID: "ba9bc9a7-7ac4-43eb-a545-099564781a42"). InnerVolumeSpecName "kube-api-access-r5rbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.800919 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba9bc9a7-7ac4-43eb-a545-099564781a42" (UID: "ba9bc9a7-7ac4-43eb-a545-099564781a42"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.881232 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.881549 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5rbg\" (UniqueName: \"kubernetes.io/projected/ba9bc9a7-7ac4-43eb-a545-099564781a42-kube-api-access-r5rbg\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.881575 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9bc9a7-7ac4-43eb-a545-099564781a42-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.881585 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.881595 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xd4t\" (UniqueName: \"kubernetes.io/projected/3602ee8f-3aa3-4873-a791-5e695083cd99-kube-api-access-4xd4t\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.891829 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2k2fc"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.896140 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2x2xd"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.899653 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2x2xd"] Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.902631 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3602ee8f-3aa3-4873-a791-5e695083cd99" (UID: "3602ee8f-3aa3-4873-a791-5e695083cd99"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:24:30 crc kubenswrapper[4912]: I1208 21:24:30.988274 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3602ee8f-3aa3-4873-a791-5e695083cd99-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.571295 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p8tl" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.572182 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p8tl" event={"ID":"ba9bc9a7-7ac4-43eb-a545-099564781a42","Type":"ContainerDied","Data":"f0395209b9f27d006d9b867124389eba0248ec1bedcd4c3700a4832c376810f1"} Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.572259 4912 scope.go:117] "RemoveContainer" containerID="8929b262f7c1a2da65b5091b9c1a0f61d1a26f6903d79f85c11d68579fb78210" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.576915 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nhqb" event={"ID":"3602ee8f-3aa3-4873-a791-5e695083cd99","Type":"ContainerDied","Data":"7e9d7df537650aad9ca0e95ee30276a9e296345ef9187b4891f04be83c1e1e77"} Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.576971 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nhqb" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.578526 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" event={"ID":"fc8cf81c-b57c-4849-9e7c-10b0753855b1","Type":"ContainerStarted","Data":"1a41d65ed7611ac66b38922bd25d7e655070dad4a8b553cbafb3123e2cc30b77"} Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.578821 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.582724 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.591751 4912 scope.go:117] "RemoveContainer" containerID="ec6e279a4c480d8f113079918cc9353a22c7e977220f7485ace6d62507b543ae" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.612088 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-zgnwz" podStartSLOduration=2.612027627 podStartE2EDuration="2.612027627s" podCreationTimestamp="2025-12-08 21:24:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:24:31.606918737 +0000 UTC m=+353.469920820" watchObservedRunningTime="2025-12-08 21:24:31.612027627 +0000 UTC m=+353.475029710" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.615680 4912 scope.go:117] "RemoveContainer" containerID="00f66cd362df975101e207944c3b17363c0581f337d86ddb4806c99409d9950c" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.634057 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.636908 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p8tl"] Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.652152 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.653346 4912 scope.go:117] "RemoveContainer" containerID="a0fadcbf89dc836c83019ef396e89af7dd2bd7912cf2a611c205b93334fc56d5" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.657631 4912 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7nhqb"] Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.668340 4912 scope.go:117] "RemoveContainer" containerID="7439640f3253238f92481d2c93e0fc41e36c6ae6b559e137af1298e73d3832c2" Dec 08 21:24:31 crc kubenswrapper[4912]: I1208 21:24:31.691561 4912 scope.go:117] "RemoveContainer" containerID="64c5aeb572ddc8957e12d027ff43b17bba6bff13a253f200886087966c4fa124" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.434430 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" path="/var/lib/kubelet/pods/3602ee8f-3aa3-4873-a791-5e695083cd99/volumes" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.435868 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" path="/var/lib/kubelet/pods/570435e6-b620-4b1c-8f4b-47b36f3bee5e/volumes" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.436534 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f40e8be-f418-4be6-912f-d732718730e7" path="/var/lib/kubelet/pods/9f40e8be-f418-4be6-912f-d732718730e7/volumes" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.437007 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" path="/var/lib/kubelet/pods/ba9bc9a7-7ac4-43eb-a545-099564781a42/volumes" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.438201 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" path="/var/lib/kubelet/pods/c56535e8-663a-43a9-b596-79a4d3ac0403/volumes" Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.975439 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:24:32 crc kubenswrapper[4912]: I1208 21:24:32.976785 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663190 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663518 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663538 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663553 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663562 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663578 4912 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663587 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663603 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663612 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663622 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663630 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663641 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663648 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663658 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663665 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663678 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663687 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663702 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663712 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663724 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663732 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663743 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663751 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="extract-utilities" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663767 4912 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663775 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="extract-content" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.663788 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663796 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663922 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663939 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="570435e6-b620-4b1c-8f4b-47b36f3bee5e" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663954 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56535e8-663a-43a9-b596-79a4d3ac0403" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663964 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="3602ee8f-3aa3-4873-a791-5e695083cd99" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.663977 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba9bc9a7-7ac4-43eb-a545-099564781a42" containerName="registry-server" Dec 08 21:24:33 crc kubenswrapper[4912]: E1208 21:24:33.664104 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.664117 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.664241 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f40e8be-f418-4be6-912f-d732718730e7" containerName="marketplace-operator" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.665227 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.670812 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.680960 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.838468 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ct2d\" (UniqueName: \"kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.838549 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.838576 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.939814 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ct2d\" (UniqueName: \"kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.940289 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.940314 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.940754 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.940856 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " 
pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.963664 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ct2d\" (UniqueName: \"kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d\") pod \"redhat-operators-gc68w\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:33 crc kubenswrapper[4912]: I1208 21:24:33.985355 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:34 crc kubenswrapper[4912]: I1208 21:24:34.502963 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:24:34 crc kubenswrapper[4912]: I1208 21:24:34.709493 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerStarted","Data":"045234994c8ab93378e33b01e2408fc1ae712985e42ca690af1b9fcf591cee1c"} Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.452933 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vjr7b"] Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.454530 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.456928 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.475512 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vjr7b"] Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.566751 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbf8f\" (UniqueName: \"kubernetes.io/projected/e29efa51-b798-4e0a-bf88-27affe2b33ab-kube-api-access-vbf8f\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.566898 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-catalog-content\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.566918 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-utilities\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.667848 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-catalog-content\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 
21:24:35.667894 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-utilities\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.667948 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbf8f\" (UniqueName: \"kubernetes.io/projected/e29efa51-b798-4e0a-bf88-27affe2b33ab-kube-api-access-vbf8f\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.668551 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-catalog-content\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.668618 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e29efa51-b798-4e0a-bf88-27affe2b33ab-utilities\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.719080 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbf8f\" (UniqueName: \"kubernetes.io/projected/e29efa51-b798-4e0a-bf88-27affe2b33ab-kube-api-access-vbf8f\") pod \"community-operators-vjr7b\" (UID: \"e29efa51-b798-4e0a-bf88-27affe2b33ab\") " pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.722905 4912 generic.go:334] "Generic (PLEG): container finished" podID="66f84380-1e3e-4023-b2be-78c959851aef" containerID="ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b" exitCode=0 Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.722962 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerDied","Data":"ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b"} Dec 08 21:24:35 crc kubenswrapper[4912]: I1208 21:24:35.775369 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.066015 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.067761 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.071145 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.073349 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.075309 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.075342 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxn8j\" (UniqueName: \"kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.075378 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.176875 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.176965 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxn8j\" (UniqueName: \"kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.177004 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.177573 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.177999 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content\") pod \"certified-operators-gjqv9\" (UID: 
\"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.203152 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxn8j\" (UniqueName: \"kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j\") pod \"certified-operators-gjqv9\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.294523 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vjr7b"] Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.390056 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.631417 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:24:36 crc kubenswrapper[4912]: W1208 21:24:36.636550 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9834161_a62c_4258_963d_3216a0f2d185.slice/crio-abec0d2eb2f021d0f5c3ca035904c5978a69faea6c56ac26106f461b91d43fd6 WatchSource:0}: Error finding container abec0d2eb2f021d0f5c3ca035904c5978a69faea6c56ac26106f461b91d43fd6: Status 404 returned error can't find the container with id abec0d2eb2f021d0f5c3ca035904c5978a69faea6c56ac26106f461b91d43fd6 Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.736220 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerStarted","Data":"abec0d2eb2f021d0f5c3ca035904c5978a69faea6c56ac26106f461b91d43fd6"} Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.753676 4912 generic.go:334] "Generic (PLEG): container finished" podID="e29efa51-b798-4e0a-bf88-27affe2b33ab" containerID="c4deabd20354b999b6f6f6e3558cbeb6fdb2aa34582da1e506350049cda58bde" exitCode=0 Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.753798 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjr7b" event={"ID":"e29efa51-b798-4e0a-bf88-27affe2b33ab","Type":"ContainerDied","Data":"c4deabd20354b999b6f6f6e3558cbeb6fdb2aa34582da1e506350049cda58bde"} Dec 08 21:24:36 crc kubenswrapper[4912]: I1208 21:24:36.754184 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjr7b" event={"ID":"e29efa51-b798-4e0a-bf88-27affe2b33ab","Type":"ContainerStarted","Data":"14a48d1bc133bc463b9033338c7ce63531d713b650186020f42313750f4bed6b"} Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.765502 4912 generic.go:334] "Generic (PLEG): container finished" podID="b9834161-a62c-4258-963d-3216a0f2d185" containerID="734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5" exitCode=0 Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.765960 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerDied","Data":"734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5"} Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.771561 4912 generic.go:334] "Generic (PLEG): container finished" podID="66f84380-1e3e-4023-b2be-78c959851aef" 
containerID="80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755" exitCode=0 Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.771618 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerDied","Data":"80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755"} Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.777421 4912 generic.go:334] "Generic (PLEG): container finished" podID="e29efa51-b798-4e0a-bf88-27affe2b33ab" containerID="6b9bef6c2a2f0d358bd9c4c4151dc826d7189334c2a498f64f353afe2669d579" exitCode=0 Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.777487 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjr7b" event={"ID":"e29efa51-b798-4e0a-bf88-27affe2b33ab","Type":"ContainerDied","Data":"6b9bef6c2a2f0d358bd9c4c4151dc826d7189334c2a498f64f353afe2669d579"} Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.860396 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2vsmr"] Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.861594 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.863602 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 21:24:37 crc kubenswrapper[4912]: I1208 21:24:37.874120 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vsmr"] Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.015549 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-catalog-content\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.015628 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sntss\" (UniqueName: \"kubernetes.io/projected/f24ef83a-a9e3-4bc2-931c-92673a9e6347-kube-api-access-sntss\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.015765 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-utilities\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.117101 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-catalog-content\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.117191 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sntss\" (UniqueName: 
\"kubernetes.io/projected/f24ef83a-a9e3-4bc2-931c-92673a9e6347-kube-api-access-sntss\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.117303 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-utilities\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.117724 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-catalog-content\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.118019 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24ef83a-a9e3-4bc2-931c-92673a9e6347-utilities\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.142859 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sntss\" (UniqueName: \"kubernetes.io/projected/f24ef83a-a9e3-4bc2-931c-92673a9e6347-kube-api-access-sntss\") pod \"redhat-marketplace-2vsmr\" (UID: \"f24ef83a-a9e3-4bc2-931c-92673a9e6347\") " pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.234760 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.712552 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vsmr"] Dec 08 21:24:38 crc kubenswrapper[4912]: W1208 21:24:38.721973 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf24ef83a_a9e3_4bc2_931c_92673a9e6347.slice/crio-0a5565b64998758b076e75d14bbc9ed5d3c38e81229afeb74d077c3915d6f663 WatchSource:0}: Error finding container 0a5565b64998758b076e75d14bbc9ed5d3c38e81229afeb74d077c3915d6f663: Status 404 returned error can't find the container with id 0a5565b64998758b076e75d14bbc9ed5d3c38e81229afeb74d077c3915d6f663 Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.790623 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerStarted","Data":"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c"} Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.797503 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjr7b" event={"ID":"e29efa51-b798-4e0a-bf88-27affe2b33ab","Type":"ContainerStarted","Data":"6f9887c64aa4f63599bd87b413ca68571f8bb9d2be6b6be3b50d11ef33a32f74"} Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.799716 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vsmr" event={"ID":"f24ef83a-a9e3-4bc2-931c-92673a9e6347","Type":"ContainerStarted","Data":"0a5565b64998758b076e75d14bbc9ed5d3c38e81229afeb74d077c3915d6f663"} Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.802539 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerStarted","Data":"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c"} Dec 08 21:24:38 crc kubenswrapper[4912]: I1208 21:24:38.828301 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gc68w" podStartSLOduration=3.373764281 podStartE2EDuration="5.828277786s" podCreationTimestamp="2025-12-08 21:24:33 +0000 UTC" firstStartedPulling="2025-12-08 21:24:35.72547597 +0000 UTC m=+357.588478053" lastFinishedPulling="2025-12-08 21:24:38.179989475 +0000 UTC m=+360.042991558" observedRunningTime="2025-12-08 21:24:38.827269802 +0000 UTC m=+360.690271895" watchObservedRunningTime="2025-12-08 21:24:38.828277786 +0000 UTC m=+360.691279869" Dec 08 21:24:39 crc kubenswrapper[4912]: I1208 21:24:39.836325 4912 generic.go:334] "Generic (PLEG): container finished" podID="b9834161-a62c-4258-963d-3216a0f2d185" containerID="fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c" exitCode=0 Dec 08 21:24:39 crc kubenswrapper[4912]: I1208 21:24:39.836825 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerDied","Data":"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c"} Dec 08 21:24:39 crc kubenswrapper[4912]: I1208 21:24:39.839743 4912 generic.go:334] "Generic (PLEG): container finished" podID="f24ef83a-a9e3-4bc2-931c-92673a9e6347" containerID="7712d554f6ff66dfe688b4d2dda7cdb31182210376bbfae6c482b197662f2dbb" exitCode=0 Dec 08 
21:24:39 crc kubenswrapper[4912]: I1208 21:24:39.839842 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vsmr" event={"ID":"f24ef83a-a9e3-4bc2-931c-92673a9e6347","Type":"ContainerDied","Data":"7712d554f6ff66dfe688b4d2dda7cdb31182210376bbfae6c482b197662f2dbb"} Dec 08 21:24:39 crc kubenswrapper[4912]: I1208 21:24:39.864548 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vjr7b" podStartSLOduration=3.312574381 podStartE2EDuration="4.864517186s" podCreationTimestamp="2025-12-08 21:24:35 +0000 UTC" firstStartedPulling="2025-12-08 21:24:36.759288445 +0000 UTC m=+358.622290528" lastFinishedPulling="2025-12-08 21:24:38.31123125 +0000 UTC m=+360.174233333" observedRunningTime="2025-12-08 21:24:38.848860841 +0000 UTC m=+360.711862934" watchObservedRunningTime="2025-12-08 21:24:39.864517186 +0000 UTC m=+361.727519269" Dec 08 21:24:40 crc kubenswrapper[4912]: I1208 21:24:40.660398 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-cl4bh" Dec 08 21:24:40 crc kubenswrapper[4912]: I1208 21:24:40.738889 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:24:41 crc kubenswrapper[4912]: I1208 21:24:41.882817 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerStarted","Data":"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46"} Dec 08 21:24:41 crc kubenswrapper[4912]: I1208 21:24:41.886387 4912 generic.go:334] "Generic (PLEG): container finished" podID="f24ef83a-a9e3-4bc2-931c-92673a9e6347" containerID="a7f824c714155dbb6d6a69c842f406b5ddaa1890a37a61a5086358269a9303c9" exitCode=0 Dec 08 21:24:41 crc kubenswrapper[4912]: I1208 21:24:41.886445 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vsmr" event={"ID":"f24ef83a-a9e3-4bc2-931c-92673a9e6347","Type":"ContainerDied","Data":"a7f824c714155dbb6d6a69c842f406b5ddaa1890a37a61a5086358269a9303c9"} Dec 08 21:24:41 crc kubenswrapper[4912]: I1208 21:24:41.909380 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gjqv9" podStartSLOduration=3.430932941 podStartE2EDuration="5.909345038s" podCreationTimestamp="2025-12-08 21:24:36 +0000 UTC" firstStartedPulling="2025-12-08 21:24:37.768137131 +0000 UTC m=+359.631139224" lastFinishedPulling="2025-12-08 21:24:40.246549228 +0000 UTC m=+362.109551321" observedRunningTime="2025-12-08 21:24:41.907159897 +0000 UTC m=+363.770161980" watchObservedRunningTime="2025-12-08 21:24:41.909345038 +0000 UTC m=+363.772347121" Dec 08 21:24:42 crc kubenswrapper[4912]: I1208 21:24:42.895807 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vsmr" event={"ID":"f24ef83a-a9e3-4bc2-931c-92673a9e6347","Type":"ContainerStarted","Data":"03a589fa1a2c56dca2495fbd9dfeead458fccc85a9dc9a5bfac0ed8600fc98f3"} Dec 08 21:24:42 crc kubenswrapper[4912]: I1208 21:24:42.930905 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2vsmr" podStartSLOduration=3.176400172 podStartE2EDuration="5.930877252s" podCreationTimestamp="2025-12-08 21:24:37 +0000 UTC" firstStartedPulling="2025-12-08 21:24:39.841601566 +0000 UTC 
m=+361.704603649" lastFinishedPulling="2025-12-08 21:24:42.596078646 +0000 UTC m=+364.459080729" observedRunningTime="2025-12-08 21:24:42.919504884 +0000 UTC m=+364.782506967" watchObservedRunningTime="2025-12-08 21:24:42.930877252 +0000 UTC m=+364.793879335" Dec 08 21:24:43 crc kubenswrapper[4912]: I1208 21:24:43.985719 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:43 crc kubenswrapper[4912]: I1208 21:24:43.987563 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:45 crc kubenswrapper[4912]: I1208 21:24:45.041176 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gc68w" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="registry-server" probeResult="failure" output=< Dec 08 21:24:45 crc kubenswrapper[4912]: timeout: failed to connect service ":50051" within 1s Dec 08 21:24:45 crc kubenswrapper[4912]: > Dec 08 21:24:45 crc kubenswrapper[4912]: I1208 21:24:45.776499 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:45 crc kubenswrapper[4912]: I1208 21:24:45.776856 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:45 crc kubenswrapper[4912]: I1208 21:24:45.831536 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:45 crc kubenswrapper[4912]: I1208 21:24:45.957345 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vjr7b" Dec 08 21:24:46 crc kubenswrapper[4912]: I1208 21:24:46.391210 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:46 crc kubenswrapper[4912]: I1208 21:24:46.391579 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:46 crc kubenswrapper[4912]: I1208 21:24:46.439983 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:46 crc kubenswrapper[4912]: I1208 21:24:46.959219 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:24:48 crc kubenswrapper[4912]: I1208 21:24:48.236057 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:48 crc kubenswrapper[4912]: I1208 21:24:48.236418 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:48 crc kubenswrapper[4912]: I1208 21:24:48.272100 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:48 crc kubenswrapper[4912]: I1208 21:24:48.978172 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2vsmr" Dec 08 21:24:54 crc kubenswrapper[4912]: I1208 21:24:54.030129 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:24:54 crc 
kubenswrapper[4912]: I1208 21:24:54.074750 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:25:02 crc kubenswrapper[4912]: I1208 21:25:02.964886 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:25:02 crc kubenswrapper[4912]: I1208 21:25:02.965660 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:25:05 crc kubenswrapper[4912]: I1208 21:25:05.786166 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" podUID="c5facd66-e234-44bc-b3b5-36f9860d98d1" containerName="registry" containerID="cri-o://7e4a13381d543d0c13ebca42391d58776999d266f4cf020238d79ebf8696346e" gracePeriod=30 Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.036223 4912 generic.go:334] "Generic (PLEG): container finished" podID="c5facd66-e234-44bc-b3b5-36f9860d98d1" containerID="7e4a13381d543d0c13ebca42391d58776999d266f4cf020238d79ebf8696346e" exitCode=0 Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.036455 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" event={"ID":"c5facd66-e234-44bc-b3b5-36f9860d98d1","Type":"ContainerDied","Data":"7e4a13381d543d0c13ebca42391d58776999d266f4cf020238d79ebf8696346e"} Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.186118 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.236365 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.236492 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.236927 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.237000 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.237112 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.237189 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.237250 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.237358 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47vnk\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk\") pod \"c5facd66-e234-44bc-b3b5-36f9860d98d1\" (UID: \"c5facd66-e234-44bc-b3b5-36f9860d98d1\") " Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.242224 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.243170 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.247990 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.248915 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.250644 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.252946 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.255334 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk" (OuterVolumeSpecName: "kube-api-access-47vnk") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "kube-api-access-47vnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.256609 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "c5facd66-e234-44bc-b3b5-36f9860d98d1" (UID: "c5facd66-e234-44bc-b3b5-36f9860d98d1"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339655 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47vnk\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-kube-api-access-47vnk\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339705 4912 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339720 4912 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339731 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5facd66-e234-44bc-b3b5-36f9860d98d1-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339743 4912 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5facd66-e234-44bc-b3b5-36f9860d98d1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339755 4912 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5facd66-e234-44bc-b3b5-36f9860d98d1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:06 crc kubenswrapper[4912]: I1208 21:25:06.339765 4912 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5facd66-e234-44bc-b3b5-36f9860d98d1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 21:25:07 crc kubenswrapper[4912]: I1208 21:25:07.046585 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" event={"ID":"c5facd66-e234-44bc-b3b5-36f9860d98d1","Type":"ContainerDied","Data":"f012ea9994c5f156920eabbb4afc59021f1dda63a02ca079c23aa72610e9828c"} Dec 08 21:25:07 crc kubenswrapper[4912]: I1208 21:25:07.046671 4912 scope.go:117] "RemoveContainer" containerID="7e4a13381d543d0c13ebca42391d58776999d266f4cf020238d79ebf8696346e" Dec 08 21:25:07 crc kubenswrapper[4912]: I1208 21:25:07.046689 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-45v4h" Dec 08 21:25:07 crc kubenswrapper[4912]: I1208 21:25:07.068915 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:25:07 crc kubenswrapper[4912]: I1208 21:25:07.076866 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-45v4h"] Dec 08 21:25:08 crc kubenswrapper[4912]: I1208 21:25:08.437005 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5facd66-e234-44bc-b3b5-36f9860d98d1" path="/var/lib/kubelet/pods/c5facd66-e234-44bc-b3b5-36f9860d98d1/volumes" Dec 08 21:25:32 crc kubenswrapper[4912]: I1208 21:25:32.965774 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:25:32 crc kubenswrapper[4912]: I1208 21:25:32.966733 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:25:32 crc kubenswrapper[4912]: I1208 21:25:32.966851 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:25:32 crc kubenswrapper[4912]: I1208 21:25:32.968217 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:25:32 crc kubenswrapper[4912]: I1208 21:25:32.968369 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960" gracePeriod=600 Dec 08 21:25:33 crc kubenswrapper[4912]: I1208 21:25:33.221613 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960" exitCode=0 Dec 08 21:25:33 crc kubenswrapper[4912]: I1208 21:25:33.221674 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960"} Dec 08 21:25:33 crc kubenswrapper[4912]: I1208 21:25:33.221928 4912 scope.go:117] "RemoveContainer" containerID="0e7c12588a2a5394a0ac3187977931f6255f771fe5ab6f1e4f272ad02affe1ed" Dec 08 21:25:34 crc kubenswrapper[4912]: I1208 21:25:34.233425 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" 
event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17"} Dec 08 21:27:38 crc kubenswrapper[4912]: I1208 21:27:38.717448 4912 scope.go:117] "RemoveContainer" containerID="ee09fbe83e452028cbbd60308aed9aaebc123e7c807aca52e6620e413d85730a" Dec 08 21:28:02 crc kubenswrapper[4912]: I1208 21:28:02.964772 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:28:02 crc kubenswrapper[4912]: I1208 21:28:02.965296 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:28:32 crc kubenswrapper[4912]: I1208 21:28:32.965235 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:28:32 crc kubenswrapper[4912]: I1208 21:28:32.966203 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:28:38 crc kubenswrapper[4912]: I1208 21:28:38.747349 4912 scope.go:117] "RemoveContainer" containerID="8c7e772e2b96605fed8c87df1885ba2da376da5d4e0d59b7b5fa564ff82eacee" Dec 08 21:28:38 crc kubenswrapper[4912]: I1208 21:28:38.793057 4912 scope.go:117] "RemoveContainer" containerID="7e1f4fd4a8f2f7f71c8cd04d5a35636dc0fc97ff2837f981c6a1ad5ae96ede9f" Dec 08 21:29:02 crc kubenswrapper[4912]: I1208 21:29:02.964909 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:29:02 crc kubenswrapper[4912]: I1208 21:29:02.965530 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:29:02 crc kubenswrapper[4912]: I1208 21:29:02.965599 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:29:02 crc kubenswrapper[4912]: I1208 21:29:02.966458 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, 
will be restarted" Dec 08 21:29:02 crc kubenswrapper[4912]: I1208 21:29:02.966556 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17" gracePeriod=600 Dec 08 21:29:03 crc kubenswrapper[4912]: I1208 21:29:03.622831 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17" exitCode=0 Dec 08 21:29:03 crc kubenswrapper[4912]: I1208 21:29:03.622977 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17"} Dec 08 21:29:03 crc kubenswrapper[4912]: I1208 21:29:03.623301 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d"} Dec 08 21:29:03 crc kubenswrapper[4912]: I1208 21:29:03.623329 4912 scope.go:117] "RemoveContainer" containerID="b70b35a710e0841421668ee91c5fb87a0a160d5cb5d16fa7a42c84bc217e8960" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.181657 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz"] Dec 08 21:30:00 crc kubenswrapper[4912]: E1208 21:30:00.183541 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5facd66-e234-44bc-b3b5-36f9860d98d1" containerName="registry" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.183575 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5facd66-e234-44bc-b3b5-36f9860d98d1" containerName="registry" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.183687 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5facd66-e234-44bc-b3b5-36f9860d98d1" containerName="registry" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.184352 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.186758 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.187302 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.194961 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz"] Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.217498 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.217601 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpvkk\" (UniqueName: \"kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.217665 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.319260 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.319404 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.319452 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpvkk\" (UniqueName: \"kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.320234 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume\") pod 
\"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.344354 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.349991 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpvkk\" (UniqueName: \"kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk\") pod \"collect-profiles-29420490-sh8gz\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.509333 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:00 crc kubenswrapper[4912]: I1208 21:30:00.687766 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz"] Dec 08 21:30:01 crc kubenswrapper[4912]: I1208 21:30:01.052182 4912 generic.go:334] "Generic (PLEG): container finished" podID="71f0bde6-5047-441f-ab47-d77e824847c1" containerID="5825af5c0710d86d981d66a5d877dda8d9a59849de68d05aca05b478c63cbb05" exitCode=0 Dec 08 21:30:01 crc kubenswrapper[4912]: I1208 21:30:01.052252 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" event={"ID":"71f0bde6-5047-441f-ab47-d77e824847c1","Type":"ContainerDied","Data":"5825af5c0710d86d981d66a5d877dda8d9a59849de68d05aca05b478c63cbb05"} Dec 08 21:30:01 crc kubenswrapper[4912]: I1208 21:30:01.052528 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" event={"ID":"71f0bde6-5047-441f-ab47-d77e824847c1","Type":"ContainerStarted","Data":"ea16580ed47e26a540623b8ab561f232ed279e77f230a7cfc19ce7bd4ce88da5"} Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.258336 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.447478 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpvkk\" (UniqueName: \"kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk\") pod \"71f0bde6-5047-441f-ab47-d77e824847c1\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.447916 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume\") pod \"71f0bde6-5047-441f-ab47-d77e824847c1\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.447960 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume\") pod \"71f0bde6-5047-441f-ab47-d77e824847c1\" (UID: \"71f0bde6-5047-441f-ab47-d77e824847c1\") " Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.448768 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume" (OuterVolumeSpecName: "config-volume") pod "71f0bde6-5047-441f-ab47-d77e824847c1" (UID: "71f0bde6-5047-441f-ab47-d77e824847c1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.452754 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk" (OuterVolumeSpecName: "kube-api-access-rpvkk") pod "71f0bde6-5047-441f-ab47-d77e824847c1" (UID: "71f0bde6-5047-441f-ab47-d77e824847c1"). InnerVolumeSpecName "kube-api-access-rpvkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.452922 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "71f0bde6-5047-441f-ab47-d77e824847c1" (UID: "71f0bde6-5047-441f-ab47-d77e824847c1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.549879 4912 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71f0bde6-5047-441f-ab47-d77e824847c1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.549919 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71f0bde6-5047-441f-ab47-d77e824847c1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:02 crc kubenswrapper[4912]: I1208 21:30:02.549937 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpvkk\" (UniqueName: \"kubernetes.io/projected/71f0bde6-5047-441f-ab47-d77e824847c1-kube-api-access-rpvkk\") on node \"crc\" DevicePath \"\"" Dec 08 21:30:03 crc kubenswrapper[4912]: I1208 21:30:03.065379 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" Dec 08 21:30:03 crc kubenswrapper[4912]: I1208 21:30:03.065372 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz" event={"ID":"71f0bde6-5047-441f-ab47-d77e824847c1","Type":"ContainerDied","Data":"ea16580ed47e26a540623b8ab561f232ed279e77f230a7cfc19ce7bd4ce88da5"} Dec 08 21:30:03 crc kubenswrapper[4912]: I1208 21:30:03.065478 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea16580ed47e26a540623b8ab561f232ed279e77f230a7cfc19ce7bd4ce88da5" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.930829 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c"] Dec 08 21:30:59 crc kubenswrapper[4912]: E1208 21:30:59.932025 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f0bde6-5047-441f-ab47-d77e824847c1" containerName="collect-profiles" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.932071 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f0bde6-5047-441f-ab47-d77e824847c1" containerName="collect-profiles" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.932239 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f0bde6-5047-441f-ab47-d77e824847c1" containerName="collect-profiles" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.933322 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.940561 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.945028 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.945100 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.945140 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9dzq\" (UniqueName: \"kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:30:59 crc kubenswrapper[4912]: I1208 21:30:59.945584 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c"] Dec 08 
21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.046784 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.046848 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9dzq\" (UniqueName: \"kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.046928 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.047587 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.047599 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.069205 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9dzq\" (UniqueName: \"kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.251564 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:00 crc kubenswrapper[4912]: I1208 21:31:00.606801 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c"] Dec 08 21:31:01 crc kubenswrapper[4912]: I1208 21:31:01.431541 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerStarted","Data":"d1435772f83c9d19b459c13bdd176a80646d81e5744ea280ddce5ca31606efda"} Dec 08 21:31:01 crc kubenswrapper[4912]: I1208 21:31:01.431938 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerStarted","Data":"11e6109436a607f5372f2980fbb5700ecb4ff1817d09c59b87b47920e1f4f7c5"} Dec 08 21:31:02 crc kubenswrapper[4912]: I1208 21:31:02.436731 4912 generic.go:334] "Generic (PLEG): container finished" podID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerID="d1435772f83c9d19b459c13bdd176a80646d81e5744ea280ddce5ca31606efda" exitCode=0 Dec 08 21:31:02 crc kubenswrapper[4912]: I1208 21:31:02.436796 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerDied","Data":"d1435772f83c9d19b459c13bdd176a80646d81e5744ea280ddce5ca31606efda"} Dec 08 21:31:02 crc kubenswrapper[4912]: I1208 21:31:02.439176 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:31:04 crc kubenswrapper[4912]: I1208 21:31:04.456856 4912 generic.go:334] "Generic (PLEG): container finished" podID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerID="8d0faaa1196fbdab7c4883c9fce67e58dcd9d136782ef599552ee9ecb316bf35" exitCode=0 Dec 08 21:31:04 crc kubenswrapper[4912]: I1208 21:31:04.457231 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerDied","Data":"8d0faaa1196fbdab7c4883c9fce67e58dcd9d136782ef599552ee9ecb316bf35"} Dec 08 21:31:05 crc kubenswrapper[4912]: I1208 21:31:05.482091 4912 generic.go:334] "Generic (PLEG): container finished" podID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerID="2e71b22814130c75cf6f8690d39a9077a86ba0d952d17a94e10a6974b80d4729" exitCode=0 Dec 08 21:31:05 crc kubenswrapper[4912]: I1208 21:31:05.482260 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerDied","Data":"2e71b22814130c75cf6f8690d39a9077a86ba0d952d17a94e10a6974b80d4729"} Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.757536 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.893534 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9dzq\" (UniqueName: \"kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq\") pod \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.893649 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util\") pod \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.893714 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle\") pod \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\" (UID: \"a3f1f4ed-5050-40db-9fe0-7979c52368e0\") " Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.897177 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle" (OuterVolumeSpecName: "bundle") pod "a3f1f4ed-5050-40db-9fe0-7979c52368e0" (UID: "a3f1f4ed-5050-40db-9fe0-7979c52368e0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.901142 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq" (OuterVolumeSpecName: "kube-api-access-t9dzq") pod "a3f1f4ed-5050-40db-9fe0-7979c52368e0" (UID: "a3f1f4ed-5050-40db-9fe0-7979c52368e0"). InnerVolumeSpecName "kube-api-access-t9dzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.907863 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util" (OuterVolumeSpecName: "util") pod "a3f1f4ed-5050-40db-9fe0-7979c52368e0" (UID: "a3f1f4ed-5050-40db-9fe0-7979c52368e0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.995247 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9dzq\" (UniqueName: \"kubernetes.io/projected/a3f1f4ed-5050-40db-9fe0-7979c52368e0-kube-api-access-t9dzq\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.995296 4912 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:06 crc kubenswrapper[4912]: I1208 21:31:06.995306 4912 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3f1f4ed-5050-40db-9fe0-7979c52368e0-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:07 crc kubenswrapper[4912]: I1208 21:31:07.498205 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" event={"ID":"a3f1f4ed-5050-40db-9fe0-7979c52368e0","Type":"ContainerDied","Data":"11e6109436a607f5372f2980fbb5700ecb4ff1817d09c59b87b47920e1f4f7c5"} Dec 08 21:31:07 crc kubenswrapper[4912]: I1208 21:31:07.498256 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11e6109436a607f5372f2980fbb5700ecb4ff1817d09c59b87b47920e1f4f7c5" Dec 08 21:31:07 crc kubenswrapper[4912]: I1208 21:31:07.498674 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c" Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.646011 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7qdqq"] Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.648817 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-controller" containerID="cri-o://79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.649396 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="sbdb" containerID="cri-o://40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.649490 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-node" containerID="cri-o://1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.649538 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="northd" containerID="cri-o://4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.649573 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.649609 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-acl-logging" containerID="cri-o://8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.648871 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="nbdb" containerID="cri-o://1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.694173 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" containerID="cri-o://f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" gracePeriod=30 Dec 08 21:31:10 crc kubenswrapper[4912]: I1208 21:31:10.819703 4912 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.019577 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/3.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.022239 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovn-acl-logging/0.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.022864 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovn-controller/0.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.023445 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.049333 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.049706 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.049804 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.049965 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050197 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050320 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkffq\" (UniqueName: \"kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050432 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050073 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050116 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050128 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050562 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050910 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.050518 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051152 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051325 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051458 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051624 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051814 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 
21:31:11.051965 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052067 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052187 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052269 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052355 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052652 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052758 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd\") pod \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\" (UID: \"57520f45-3ab9-41ea-8a10-3fa74c02f04b\") " Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.054378 4912 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.054651 4912 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.054749 4912 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.054822 4912 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc 
kubenswrapper[4912]: I1208 21:31:11.054905 4912 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051277 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051387 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051572 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051688 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051725 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.051920 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket" (OuterVolumeSpecName: "log-socket") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052459 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log" (OuterVolumeSpecName: "node-log") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052485 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052510 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052559 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.052579 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash" (OuterVolumeSpecName: "host-slash") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.063320 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq" (OuterVolumeSpecName: "kube-api-access-mkffq") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "kube-api-access-mkffq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.054539 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.080760 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.090714 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "57520f45-3ab9-41ea-8a10-3fa74c02f04b" (UID: "57520f45-3ab9-41ea-8a10-3fa74c02f04b"). 
InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155912 4912 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155954 4912 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-node-log\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155966 4912 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155977 4912 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-slash\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155986 4912 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.155994 4912 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156003 4912 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156012 4912 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156022 4912 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/57520f45-3ab9-41ea-8a10-3fa74c02f04b-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156047 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkffq\" (UniqueName: \"kubernetes.io/projected/57520f45-3ab9-41ea-8a10-3fa74c02f04b-kube-api-access-mkffq\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156058 4912 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156067 4912 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156110 4912 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156119 4912 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.156128 4912 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/57520f45-3ab9-41ea-8a10-3fa74c02f04b-log-socket\") on node \"crc\" DevicePath \"\"" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193329 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mbt8h"] Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193625 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193638 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193648 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193654 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193661 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="pull" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193667 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="pull" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193678 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193684 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193694 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="sbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193700 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="sbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193710 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-node" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193716 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-node" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193725 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193731 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" 
Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193741 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="nbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193747 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="nbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193753 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193761 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193769 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kubecfg-setup" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193775 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kubecfg-setup" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193784 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="extract" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193789 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="extract" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193795 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="northd" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193801 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="northd" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193812 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-acl-logging" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193817 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-acl-logging" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.193825 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="util" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193830 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="util" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193943 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193955 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193961 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f1f4ed-5050-40db-9fe0-7979c52368e0" containerName="extract" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193970 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-node" Dec 08 21:31:11 crc 
kubenswrapper[4912]: I1208 21:31:11.193975 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193983 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovn-acl-logging" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193990 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="sbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.193995 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194003 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="northd" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194010 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="nbdb" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194019 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.194175 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194182 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.194196 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194202 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194294 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.194303 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerName="ovnkube-controller" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.196173 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257462 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-env-overrides\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257529 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-ovn\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257559 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-log-socket\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257587 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-node-log\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257617 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-script-lib\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257644 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-systemd-units\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257667 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-netns\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257694 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-slash\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257717 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-ovn-kubernetes\") 
pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257737 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-bin\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257764 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-kubelet\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257787 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-var-lib-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257812 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovn-node-metrics-cert\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257846 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257870 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-systemd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257894 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-config\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.257920 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.258050 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-etc-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.258084 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpv68\" (UniqueName: \"kubernetes.io/projected/da00d5dd-cb50-470c-9c96-2078e2cd64eb-kube-api-access-cpv68\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.258107 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-netd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359519 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-netd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359573 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-env-overrides\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359600 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-ovn\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359616 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-log-socket\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359638 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-script-lib\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359653 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-node-log\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359669 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"systemd-units\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-systemd-units\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359686 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-netns\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359703 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-slash\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359729 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-ovn-kubernetes\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359752 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-bin\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359775 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-kubelet\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359793 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-var-lib-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359811 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovn-node-metrics-cert\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359834 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359858 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" 
(UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-systemd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359878 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-config\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359898 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359917 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-etc-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.359941 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpv68\" (UniqueName: \"kubernetes.io/projected/da00d5dd-cb50-470c-9c96-2078e2cd64eb-kube-api-access-cpv68\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360301 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-ovn-kubernetes\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360334 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-systemd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360468 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-node-log\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360579 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-systemd-units\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360639 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-run-netns\") pod \"ovnkube-node-mbt8h\" 
(UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.360746 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-slash\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361140 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361277 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-bin\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361346 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-log-socket\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361326 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-ovn\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361297 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-script-lib\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361415 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-run-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361439 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovnkube-config\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361463 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-cni-netd\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: 
I1208 21:31:11.361493 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-etc-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361505 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-var-lib-openvswitch\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361704 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/da00d5dd-cb50-470c-9c96-2078e2cd64eb-host-kubelet\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.361724 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/da00d5dd-cb50-470c-9c96-2078e2cd64eb-env-overrides\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.365057 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/da00d5dd-cb50-470c-9c96-2078e2cd64eb-ovn-node-metrics-cert\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.377734 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpv68\" (UniqueName: \"kubernetes.io/projected/da00d5dd-cb50-470c-9c96-2078e2cd64eb-kube-api-access-cpv68\") pod \"ovnkube-node-mbt8h\" (UID: \"da00d5dd-cb50-470c-9c96-2078e2cd64eb\") " pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.511827 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.522253 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovnkube-controller/3.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.524756 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovn-acl-logging/0.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525335 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7qdqq_57520f45-3ab9-41ea-8a10-3fa74c02f04b/ovn-controller/0.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525772 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525861 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525891 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525907 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525925 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.525867 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526021 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526083 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526091 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526097 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" exitCode=0 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526103 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" 
containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" exitCode=143 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526109 4912 generic.go:334] "Generic (PLEG): container finished" podID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" exitCode=143 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526137 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526149 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526158 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526167 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526178 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526189 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526194 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526200 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526205 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526210 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526215 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526221 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526226 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526234 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526242 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526250 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526256 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526262 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526267 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526272 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526277 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526283 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526288 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526294 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526300 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526308 4912 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526314 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526320 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526326 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526330 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526335 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526340 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526345 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526350 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526356 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526362 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qdqq" event={"ID":"57520f45-3ab9-41ea-8a10-3fa74c02f04b","Type":"ContainerDied","Data":"bb6830e62db0923b2f6703a0bc995e1562700c40e2c0d9bb156debffcf3831e8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526369 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526375 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526380 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526385 4912 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526390 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526394 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526399 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526404 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526408 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.526413 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.529684 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/2.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.530925 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/1.log" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.531020 4912 generic.go:334] "Generic (PLEG): container finished" podID="959add28-5508-49d7-8fe3-404acef398b0" containerID="638726f38d0ee5e325aad32f9f85b601c871f91699d426f8fa589839fab05eb1" exitCode=2 Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.531094 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerDied","Data":"638726f38d0ee5e325aad32f9f85b601c871f91699d426f8fa589839fab05eb1"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.531135 4912 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83"} Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.531934 4912 scope.go:117] "RemoveContainer" containerID="638726f38d0ee5e325aad32f9f85b601c871f91699d426f8fa589839fab05eb1" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.580602 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.583818 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7qdqq"] Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.588150 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-ovn-kubernetes/ovnkube-node-7qdqq"] Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.612241 4912 scope.go:117] "RemoveContainer" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.653309 4912 scope.go:117] "RemoveContainer" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.680319 4912 scope.go:117] "RemoveContainer" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.698954 4912 scope.go:117] "RemoveContainer" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.718533 4912 scope.go:117] "RemoveContainer" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.818823 4912 scope.go:117] "RemoveContainer" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.851384 4912 scope.go:117] "RemoveContainer" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.870163 4912 scope.go:117] "RemoveContainer" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.886347 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.886741 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.886782 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} err="failed to get container status \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.886809 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.887063 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": container with ID starting with 14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a not found: ID does not exist" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887090 4912 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} err="failed to get container status \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": rpc error: code = NotFound desc = could not find container \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": container with ID starting with 14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887105 4912 scope.go:117] "RemoveContainer" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.887326 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": container with ID starting with 40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8 not found: ID does not exist" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887351 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} err="failed to get container status \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": rpc error: code = NotFound desc = could not find container \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": container with ID starting with 40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887365 4912 scope.go:117] "RemoveContainer" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.887580 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": container with ID starting with 1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45 not found: ID does not exist" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887601 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} err="failed to get container status \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": rpc error: code = NotFound desc = could not find container \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": container with ID starting with 1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887617 4912 scope.go:117] "RemoveContainer" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.887804 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": container with ID starting with 4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4 not found: ID does not exist" 
containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887831 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} err="failed to get container status \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": rpc error: code = NotFound desc = could not find container \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": container with ID starting with 4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.887845 4912 scope.go:117] "RemoveContainer" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.888166 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": container with ID starting with 96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f not found: ID does not exist" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888235 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} err="failed to get container status \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": rpc error: code = NotFound desc = could not find container \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": container with ID starting with 96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888382 4912 scope.go:117] "RemoveContainer" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.888611 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": container with ID starting with 1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03 not found: ID does not exist" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888634 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} err="failed to get container status \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": rpc error: code = NotFound desc = could not find container \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": container with ID starting with 1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888647 4912 scope.go:117] "RemoveContainer" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.888871 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": container with ID starting with 8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047 not found: ID does not exist" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888894 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} err="failed to get container status \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": rpc error: code = NotFound desc = could not find container \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": container with ID starting with 8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.888909 4912 scope.go:117] "RemoveContainer" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.889159 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": container with ID starting with 79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096 not found: ID does not exist" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.889194 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} err="failed to get container status \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": rpc error: code = NotFound desc = could not find container \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": container with ID starting with 79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.889233 4912 scope.go:117] "RemoveContainer" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: E1208 21:31:11.889532 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": container with ID starting with 2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b not found: ID does not exist" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.889566 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} err="failed to get container status \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": rpc error: code = NotFound desc = could not find container \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": container with ID starting with 2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.889582 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc 
kubenswrapper[4912]: I1208 21:31:11.889772 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} err="failed to get container status \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.889802 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890022 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} err="failed to get container status \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": rpc error: code = NotFound desc = could not find container \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": container with ID starting with 14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890062 4912 scope.go:117] "RemoveContainer" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890296 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} err="failed to get container status \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": rpc error: code = NotFound desc = could not find container \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": container with ID starting with 40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890319 4912 scope.go:117] "RemoveContainer" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890506 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} err="failed to get container status \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": rpc error: code = NotFound desc = could not find container \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": container with ID starting with 1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890527 4912 scope.go:117] "RemoveContainer" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890755 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} err="failed to get container status \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": rpc error: code = NotFound desc = could not find container \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": container with ID 
starting with 4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.890783 4912 scope.go:117] "RemoveContainer" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891125 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} err="failed to get container status \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": rpc error: code = NotFound desc = could not find container \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": container with ID starting with 96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891146 4912 scope.go:117] "RemoveContainer" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891371 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} err="failed to get container status \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": rpc error: code = NotFound desc = could not find container \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": container with ID starting with 1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891394 4912 scope.go:117] "RemoveContainer" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891624 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} err="failed to get container status \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": rpc error: code = NotFound desc = could not find container \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": container with ID starting with 8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891666 4912 scope.go:117] "RemoveContainer" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891881 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} err="failed to get container status \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": rpc error: code = NotFound desc = could not find container \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": container with ID starting with 79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.891911 4912 scope.go:117] "RemoveContainer" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892119 4912 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} err="failed to get container status \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": rpc error: code = NotFound desc = could not find container \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": container with ID starting with 2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892142 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892339 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} err="failed to get container status \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892358 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892539 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} err="failed to get container status \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": rpc error: code = NotFound desc = could not find container \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": container with ID starting with 14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892558 4912 scope.go:117] "RemoveContainer" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892768 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} err="failed to get container status \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": rpc error: code = NotFound desc = could not find container \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": container with ID starting with 40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.892798 4912 scope.go:117] "RemoveContainer" containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893004 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} err="failed to get container status \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": rpc error: code = NotFound desc = could not find container \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": container with ID starting with 1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45 not found: ID does not exist" Dec 
08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893044 4912 scope.go:117] "RemoveContainer" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893240 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} err="failed to get container status \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": rpc error: code = NotFound desc = could not find container \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": container with ID starting with 4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893261 4912 scope.go:117] "RemoveContainer" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893549 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} err="failed to get container status \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": rpc error: code = NotFound desc = could not find container \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": container with ID starting with 96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893598 4912 scope.go:117] "RemoveContainer" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893915 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} err="failed to get container status \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": rpc error: code = NotFound desc = could not find container \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": container with ID starting with 1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.893939 4912 scope.go:117] "RemoveContainer" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894171 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} err="failed to get container status \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": rpc error: code = NotFound desc = could not find container \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": container with ID starting with 8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894199 4912 scope.go:117] "RemoveContainer" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894446 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} err="failed to get container status 
\"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": rpc error: code = NotFound desc = could not find container \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": container with ID starting with 79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894469 4912 scope.go:117] "RemoveContainer" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894687 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} err="failed to get container status \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": rpc error: code = NotFound desc = could not find container \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": container with ID starting with 2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894708 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894925 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} err="failed to get container status \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.894949 4912 scope.go:117] "RemoveContainer" containerID="14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895207 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a"} err="failed to get container status \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": rpc error: code = NotFound desc = could not find container \"14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a\": container with ID starting with 14c425854fb5b3ba89d2e6c355203cf2db8764beaf0ae862b6c9455efb1f322a not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895236 4912 scope.go:117] "RemoveContainer" containerID="40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895486 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8"} err="failed to get container status \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": rpc error: code = NotFound desc = could not find container \"40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8\": container with ID starting with 40e263ff9105a7f6c5a575f5d094e2c3ca27fa73ebc2ffc88a81ebcfb34c97a8 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895512 4912 scope.go:117] "RemoveContainer" 
containerID="1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895746 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45"} err="failed to get container status \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": rpc error: code = NotFound desc = could not find container \"1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45\": container with ID starting with 1a65e4e8240fa5efdd1baa58ec29dbd1ea0197e754ea9cd7a586195820cfda45 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895771 4912 scope.go:117] "RemoveContainer" containerID="4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.895995 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4"} err="failed to get container status \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": rpc error: code = NotFound desc = could not find container \"4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4\": container with ID starting with 4083189f524454c5ccb3f561886c78a84b551210db43169ff833cbd56f2c9ad4 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896025 4912 scope.go:117] "RemoveContainer" containerID="96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896254 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f"} err="failed to get container status \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": rpc error: code = NotFound desc = could not find container \"96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f\": container with ID starting with 96e016d8dbe60e2e48fc021b6512119361c775c04ff8d723d223e5e3b0d3cb9f not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896278 4912 scope.go:117] "RemoveContainer" containerID="1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896461 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03"} err="failed to get container status \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": rpc error: code = NotFound desc = could not find container \"1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03\": container with ID starting with 1934417356e13eeba20cbc4e5ff4e80c520fd5d669433736189832aa355d8a03 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896483 4912 scope.go:117] "RemoveContainer" containerID="8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896644 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047"} err="failed to get container status \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": rpc error: code = NotFound desc = could not find 
container \"8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047\": container with ID starting with 8aaeed9ab765d8f33d05ea6ecd964e3c4920cd92210a752b975403b6b1ed3047 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896667 4912 scope.go:117] "RemoveContainer" containerID="79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896885 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096"} err="failed to get container status \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": rpc error: code = NotFound desc = could not find container \"79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096\": container with ID starting with 79025d925335714a9377fc82dbac10b894e8fa353254749d00df58cc6ba52096 not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.896980 4912 scope.go:117] "RemoveContainer" containerID="2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.897258 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b"} err="failed to get container status \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": rpc error: code = NotFound desc = could not find container \"2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b\": container with ID starting with 2e17174bc81e1c176225c487d7880f32d834b307ebfd2d0e0961f20b93ed5e3b not found: ID does not exist" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.897278 4912 scope.go:117] "RemoveContainer" containerID="f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab" Dec 08 21:31:11 crc kubenswrapper[4912]: I1208 21:31:11.897493 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab"} err="failed to get container status \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": rpc error: code = NotFound desc = could not find container \"f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab\": container with ID starting with f9ef9334a0d85d2b8885eca9a0c3e81565c14195920508837ff9bf11e2dc9dab not found: ID does not exist" Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.438876 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57520f45-3ab9-41ea-8a10-3fa74c02f04b" path="/var/lib/kubelet/pods/57520f45-3ab9-41ea-8a10-3fa74c02f04b/volumes" Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.539142 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerDied","Data":"f56e15edd5877833311ab5aba6a9f296ff087f1c30a8698a1a240078bdc3caf9"} Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.539143 4912 generic.go:334] "Generic (PLEG): container finished" podID="da00d5dd-cb50-470c-9c96-2078e2cd64eb" containerID="f56e15edd5877833311ab5aba6a9f296ff087f1c30a8698a1a240078bdc3caf9" exitCode=0 Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.539363 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" 
event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"0ed57ea6521278c83d057b0da182b3fe11beef519cb310ce4ca6848dbb6bb768"} Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.543396 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/2.log" Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.543850 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/1.log" Dec 08 21:31:12 crc kubenswrapper[4912]: I1208 21:31:12.543901 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-rp5rf" event={"ID":"959add28-5508-49d7-8fe3-404acef398b0","Type":"ContainerStarted","Data":"0071706017e245b20283a518af30da4d70a16c9c85f4a1d1ec69178395b59831"} Dec 08 21:31:13 crc kubenswrapper[4912]: I1208 21:31:13.684756 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"4d5f2af12736acbb088492cb82a626b55af90ee6ee214a9c30bbd7138cfdab15"} Dec 08 21:31:13 crc kubenswrapper[4912]: I1208 21:31:13.685270 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"c08924f6193f46b9bde0999d67054622ca6f4a1d95e830cad12c3abebf6fdbc2"} Dec 08 21:31:13 crc kubenswrapper[4912]: I1208 21:31:13.685285 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"a6eea1b42341049bb9edc8e191cbe6bee5347c6714e9dfd87dcc75cb22f0e102"} Dec 08 21:31:14 crc kubenswrapper[4912]: I1208 21:31:14.696813 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"046aaf69a0ca15cf5b5a10969c2158f04185f9b770c001104bdb62029237ce12"} Dec 08 21:31:14 crc kubenswrapper[4912]: I1208 21:31:14.697307 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"ec5209a55b68a00969cf07b5de0616e4a21c8288ba37a23cb73f80d2ce17a16a"} Dec 08 21:31:15 crc kubenswrapper[4912]: I1208 21:31:15.706739 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"0e92351357c34a85bd2c88419d957fe63e9b7a9f0952bbcaf9949d40d15debc5"} Dec 08 21:31:17 crc kubenswrapper[4912]: I1208 21:31:17.780851 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"f59757d760da312a333e7eb9d9afe4414991b05bd147ec937e7784f6e81b5e11"} Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.793058 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp"] Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.794571 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.799573 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.799597 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-48gt8" Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.799962 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 08 21:31:20 crc kubenswrapper[4912]: I1208 21:31:20.944178 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g76v\" (UniqueName: \"kubernetes.io/projected/065174a4-9aac-46c1-b83c-71861f156ee3-kube-api-access-8g76v\") pod \"obo-prometheus-operator-668cf9dfbb-gxbwp\" (UID: \"065174a4-9aac-46c1-b83c-71861f156ee3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.045796 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g76v\" (UniqueName: \"kubernetes.io/projected/065174a4-9aac-46c1-b83c-71861f156ee3-kube-api-access-8g76v\") pod \"obo-prometheus-operator-668cf9dfbb-gxbwp\" (UID: \"065174a4-9aac-46c1-b83c-71861f156ee3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.075838 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g76v\" (UniqueName: \"kubernetes.io/projected/065174a4-9aac-46c1-b83c-71861f156ee3-kube-api-access-8g76v\") pod \"obo-prometheus-operator-668cf9dfbb-gxbwp\" (UID: \"065174a4-9aac-46c1-b83c-71861f156ee3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.113844 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.154640 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(6ca1f0be06987af5368b0ed54da9fca23fff45fe3361cdb175f10abec520ab4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.154739 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(6ca1f0be06987af5368b0ed54da9fca23fff45fe3361cdb175f10abec520ab4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.154765 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(6ca1f0be06987af5368b0ed54da9fca23fff45fe3361cdb175f10abec520ab4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.154845 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators(065174a4-9aac-46c1-b83c-71861f156ee3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators(065174a4-9aac-46c1-b83c-71861f156ee3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(6ca1f0be06987af5368b0ed54da9fca23fff45fe3361cdb175f10abec520ab4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" podUID="065174a4-9aac-46c1-b83c-71861f156ee3" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.361163 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"] Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.362997 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.372704 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-x5q7r" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.373097 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.401876 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"] Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.403215 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.485129 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rc26h"] Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.486289 4912 util.go:30] "No sandbox for pod can be found. 
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.361163 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"]
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.362997 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.372704 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-x5q7r"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.373097 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.401876 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"]
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.403215 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.485129 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rc26h"]
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.486289 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.489776 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.493492 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-bnztf"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.582982 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.583252 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.583291 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.583353 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.663574 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7t6lg"]
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.664428 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.670202 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-zht2w"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685774 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685827 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdlxq\" (UniqueName: \"kubernetes.io/projected/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-kube-api-access-zdlxq\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " pod="openshift-operators/observability-operator-d8bb48f5d-rc26h"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685906 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685927 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685964 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " pod="openshift-operators/observability-operator-d8bb48f5d-rc26h"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.685996 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpqtd\" (UniqueName: \"kubernetes.io/projected/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-kube-api-access-gpqtd\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.686014 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-openshift-service-ca\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.686048 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.692415 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.693001 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9a7a69b-d12e-48e7-899f-2c919d23d906-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc\" (UID: \"e9a7a69b-d12e-48e7-899f-2c919d23d906\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.702261 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.705365 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"
Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.721545 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b4799e4-efd5-4f47-b53b-a056d4a3d046-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6\" (UID: \"7b4799e4-efd5-4f47-b53b-a056d4a3d046\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"
pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.749403 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(0a637c8278d16142a1e27e8df0978b1ff5205966f0834a89db6493dc6ed63f9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.749524 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators(e9a7a69b-d12e-48e7-899f-2c919d23d906)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators(e9a7a69b-d12e-48e7-899f-2c919d23d906)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(0a637c8278d16142a1e27e8df0978b1ff5205966f0834a89db6493dc6ed63f9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" podUID="e9a7a69b-d12e-48e7-899f-2c919d23d906" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.755564 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(6a08cdd0865667bf3a9d2a472e3194d15ac7cba39803ee185618f85e18909c21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.755680 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(6a08cdd0865667bf3a9d2a472e3194d15ac7cba39803ee185618f85e18909c21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.755726 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(6a08cdd0865667bf3a9d2a472e3194d15ac7cba39803ee185618f85e18909c21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:21 crc kubenswrapper[4912]: E1208 21:31:21.755815 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators(7b4799e4-efd5-4f47-b53b-a056d4a3d046)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators(7b4799e4-efd5-4f47-b53b-a056d4a3d046)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(6a08cdd0865667bf3a9d2a472e3194d15ac7cba39803ee185618f85e18909c21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" podUID="7b4799e4-efd5-4f47-b53b-a056d4a3d046" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.787204 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.787276 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpqtd\" (UniqueName: \"kubernetes.io/projected/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-kube-api-access-gpqtd\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.787301 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-openshift-service-ca\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.787343 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdlxq\" (UniqueName: \"kubernetes.io/projected/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-kube-api-access-zdlxq\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.788807 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-openshift-service-ca\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.792335 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " 
pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.806945 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpqtd\" (UniqueName: \"kubernetes.io/projected/e7a6b0fa-1136-439f-a0bd-96f6855dec8f-kube-api-access-gpqtd\") pod \"perses-operator-5446b9c989-7t6lg\" (UID: \"e7a6b0fa-1136-439f-a0bd-96f6855dec8f\") " pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.807999 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdlxq\" (UniqueName: \"kubernetes.io/projected/b9c5dc1e-7823-4b2d-9983-8e23244bb2b9-kube-api-access-zdlxq\") pod \"observability-operator-d8bb48f5d-rc26h\" (UID: \"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9\") " pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.814135 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" event={"ID":"da00d5dd-cb50-470c-9c96-2078e2cd64eb","Type":"ContainerStarted","Data":"7c87024c4983a15e240fdfe29da77d3bb0a2a78a3f075294d092e973a7e0c757"} Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.814471 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.814562 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.872276 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" podStartSLOduration=10.872252507 podStartE2EDuration="10.872252507s" podCreationTimestamp="2025-12-08 21:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:31:21.868276157 +0000 UTC m=+763.731278240" watchObservedRunningTime="2025-12-08 21:31:21.872252507 +0000 UTC m=+763.735254590" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.938260 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:21 crc kubenswrapper[4912]: I1208 21:31:21.986251 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.015792 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(e3ec873ebe4c4985fe821c520435cc7823f2f897978d057860fe2685176b10b2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.015889 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(e3ec873ebe4c4985fe821c520435cc7823f2f897978d057860fe2685176b10b2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.015924 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(e3ec873ebe4c4985fe821c520435cc7823f2f897978d057860fe2685176b10b2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.015992 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-7t6lg_openshift-operators(e7a6b0fa-1136-439f-a0bd-96f6855dec8f)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-7t6lg_openshift-operators(e7a6b0fa-1136-439f-a0bd-96f6855dec8f)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(e3ec873ebe4c4985fe821c520435cc7823f2f897978d057860fe2685176b10b2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" podUID="e7a6b0fa-1136-439f-a0bd-96f6855dec8f" Dec 08 21:31:22 crc kubenswrapper[4912]: I1208 21:31:22.126815 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.162386 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(1bcfc992316ffa9d10d8323743d1e83f0df163551e64b8db9d9c235b87873918): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.162485 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(1bcfc992316ffa9d10d8323743d1e83f0df163551e64b8db9d9c235b87873918): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.162516 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(1bcfc992316ffa9d10d8323743d1e83f0df163551e64b8db9d9c235b87873918): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:22 crc kubenswrapper[4912]: E1208 21:31:22.162577 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-rc26h_openshift-operators(b9c5dc1e-7823-4b2d-9983-8e23244bb2b9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-rc26h_openshift-operators(b9c5dc1e-7823-4b2d-9983-8e23244bb2b9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(1bcfc992316ffa9d10d8323743d1e83f0df163551e64b8db9d9c235b87873918): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" podUID="b9c5dc1e-7823-4b2d-9983-8e23244bb2b9" Dec 08 21:31:22 crc kubenswrapper[4912]: I1208 21:31:22.820087 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:22 crc kubenswrapper[4912]: I1208 21:31:22.875399 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.648911 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7t6lg"] Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.649099 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.649693 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.652889 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp"] Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.653063 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.653635 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.656856 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rc26h"] Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.657020 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.657627 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.680099 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"] Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.680240 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.680774 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.691705 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"] Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.691837 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:23 crc kubenswrapper[4912]: I1208 21:31:23.692340 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.741642 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(cdeadbe708097afd9bb66e5e0fce5314e04435b8b0cfcddaf23c47155215878e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.741739 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(cdeadbe708097afd9bb66e5e0fce5314e04435b8b0cfcddaf23c47155215878e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.741774 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(cdeadbe708097afd9bb66e5e0fce5314e04435b8b0cfcddaf23c47155215878e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.741856 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-7t6lg_openshift-operators(e7a6b0fa-1136-439f-a0bd-96f6855dec8f)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-7t6lg_openshift-operators(e7a6b0fa-1136-439f-a0bd-96f6855dec8f)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-7t6lg_openshift-operators_e7a6b0fa-1136-439f-a0bd-96f6855dec8f_0(cdeadbe708097afd9bb66e5e0fce5314e04435b8b0cfcddaf23c47155215878e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" podUID="e7a6b0fa-1136-439f-a0bd-96f6855dec8f" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.856140 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(bc240a93b37f73c57763edfe2d83fb190eb836f9dbcdead53d42ef56fd00d54f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.856226 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(bc240a93b37f73c57763edfe2d83fb190eb836f9dbcdead53d42ef56fd00d54f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.856258 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(bc240a93b37f73c57763edfe2d83fb190eb836f9dbcdead53d42ef56fd00d54f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.856305 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-rc26h_openshift-operators(b9c5dc1e-7823-4b2d-9983-8e23244bb2b9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-rc26h_openshift-operators(b9c5dc1e-7823-4b2d-9983-8e23244bb2b9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rc26h_openshift-operators_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9_0(bc240a93b37f73c57763edfe2d83fb190eb836f9dbcdead53d42ef56fd00d54f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" podUID="b9c5dc1e-7823-4b2d-9983-8e23244bb2b9" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.862346 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(70e69c1233a499306ea21e0fccfc5de3c30837d65b60c129909732442a0f4d99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.862429 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(70e69c1233a499306ea21e0fccfc5de3c30837d65b60c129909732442a0f4d99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.862458 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(70e69c1233a499306ea21e0fccfc5de3c30837d65b60c129909732442a0f4d99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.862505 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators(065174a4-9aac-46c1-b83c-71861f156ee3)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators(065174a4-9aac-46c1-b83c-71861f156ee3)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators_065174a4-9aac-46c1-b83c-71861f156ee3_0(70e69c1233a499306ea21e0fccfc5de3c30837d65b60c129909732442a0f4d99): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" podUID="065174a4-9aac-46c1-b83c-71861f156ee3" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.877389 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(d2bdf038f953698e817db7e02dbf5c47d3849a6de4c5371de7a7fdfd56b27160): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.877493 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(d2bdf038f953698e817db7e02dbf5c47d3849a6de4c5371de7a7fdfd56b27160): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.877517 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(d2bdf038f953698e817db7e02dbf5c47d3849a6de4c5371de7a7fdfd56b27160): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.877580 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators(7b4799e4-efd5-4f47-b53b-a056d4a3d046)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators(7b4799e4-efd5-4f47-b53b-a056d4a3d046)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators_7b4799e4-efd5-4f47-b53b-a056d4a3d046_0(d2bdf038f953698e817db7e02dbf5c47d3849a6de4c5371de7a7fdfd56b27160): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" podUID="7b4799e4-efd5-4f47-b53b-a056d4a3d046" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.888579 4912 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(8f243a7d18c961aa72f9f18219f4f74f2dd578d3e91b6087dee21cbcf52cda3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.888670 4912 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(8f243a7d18c961aa72f9f18219f4f74f2dd578d3e91b6087dee21cbcf52cda3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.888714 4912 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(8f243a7d18c961aa72f9f18219f4f74f2dd578d3e91b6087dee21cbcf52cda3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:23 crc kubenswrapper[4912]: E1208 21:31:23.888786 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators(e9a7a69b-d12e-48e7-899f-2c919d23d906)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators(e9a7a69b-d12e-48e7-899f-2c919d23d906)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators_e9a7a69b-d12e-48e7-899f-2c919d23d906_0(8f243a7d18c961aa72f9f18219f4f74f2dd578d3e91b6087dee21cbcf52cda3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" podUID="e9a7a69b-d12e-48e7-899f-2c919d23d906" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.535472 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vpq5d"] Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.537521 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.588766 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpq5d"] Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.606948 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-catalog-content\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.607014 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpmfw\" (UniqueName: \"kubernetes.io/projected/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-kube-api-access-bpmfw\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.607157 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-utilities\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.708689 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-catalog-content\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.708775 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpmfw\" (UniqueName: \"kubernetes.io/projected/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-kube-api-access-bpmfw\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.708851 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-utilities\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.709476 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-catalog-content\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.709569 4912 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-utilities\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.731767 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpmfw\" (UniqueName: \"kubernetes.io/projected/90e4ab2d-73de-458b-adc0-ffb3b9f50de2-kube-api-access-bpmfw\") pod \"redhat-operators-vpq5d\" (UID: \"90e4ab2d-73de-458b-adc0-ffb3b9f50de2\") " pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.858384 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.965790 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:31:32 crc kubenswrapper[4912]: I1208 21:31:32.965863 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:31:33 crc kubenswrapper[4912]: I1208 21:31:33.315561 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpq5d"] Dec 08 21:31:33 crc kubenswrapper[4912]: I1208 21:31:33.897828 4912 generic.go:334] "Generic (PLEG): container finished" podID="90e4ab2d-73de-458b-adc0-ffb3b9f50de2" containerID="b5d256671ddd0bd26a871c6cc69773c55b484e543047dd77c3831cada5ac9a10" exitCode=0 Dec 08 21:31:33 crc kubenswrapper[4912]: I1208 21:31:33.897923 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpq5d" event={"ID":"90e4ab2d-73de-458b-adc0-ffb3b9f50de2","Type":"ContainerDied","Data":"b5d256671ddd0bd26a871c6cc69773c55b484e543047dd77c3831cada5ac9a10"} Dec 08 21:31:33 crc kubenswrapper[4912]: I1208 21:31:33.898216 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpq5d" event={"ID":"90e4ab2d-73de-458b-adc0-ffb3b9f50de2","Type":"ContainerStarted","Data":"a9dac5f0b7a432aaeebc8e731f0a2aab51bcc708937f25e52c8e243f8a261714"} Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.427091 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.427110 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.427110 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.428173 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.428211 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:31:36 crc kubenswrapper[4912]: I1208 21:31:36.428333 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.427584 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.428651 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.636635 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7t6lg"] Dec 08 21:31:37 crc kubenswrapper[4912]: W1208 21:31:37.652608 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7a6b0fa_1136_439f_a0bd_96f6855dec8f.slice/crio-37288893552a619b07edbed388a88ebb1501785b9a7db2c1b1f71f6c18185a2d WatchSource:0}: Error finding container 37288893552a619b07edbed388a88ebb1501785b9a7db2c1b1f71f6c18185a2d: Status 404 returned error can't find the container with id 37288893552a619b07edbed388a88ebb1501785b9a7db2c1b1f71f6c18185a2d Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.655233 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rc26h"] Dec 08 21:31:37 crc kubenswrapper[4912]: W1208 21:31:37.690620 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9c5dc1e_7823_4b2d_9983_8e23244bb2b9.slice/crio-e5d7afe05f01442f90fccd940b8dc6523772d5c5285c10d6f5047815cf09f253 WatchSource:0}: Error finding container e5d7afe05f01442f90fccd940b8dc6523772d5c5285c10d6f5047815cf09f253: Status 404 returned error can't find the container with id e5d7afe05f01442f90fccd940b8dc6523772d5c5285c10d6f5047815cf09f253 Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.737197 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc"] Dec 08 21:31:37 crc kubenswrapper[4912]: W1208 21:31:37.750375 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9a7a69b_d12e_48e7_899f_2c919d23d906.slice/crio-b9045732ae1d24e73f5d760b483ded9f6bd08f4a3adbe7d0b9a16b5d7caaaf6b WatchSource:0}: Error finding container b9045732ae1d24e73f5d760b483ded9f6bd08f4a3adbe7d0b9a16b5d7caaaf6b: Status 404 returned error can't find the container with id b9045732ae1d24e73f5d760b483ded9f6bd08f4a3adbe7d0b9a16b5d7caaaf6b Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.865971 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp"] Dec 08 21:31:37 crc kubenswrapper[4912]: W1208 21:31:37.880002 4912 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod065174a4_9aac_46c1_b83c_71861f156ee3.slice/crio-03f3e31be49d19853e49ed21aea00b3f08049a9c6c63b2bf60d80e96376f4282 WatchSource:0}: Error finding container 03f3e31be49d19853e49ed21aea00b3f08049a9c6c63b2bf60d80e96376f4282: Status 404 returned error can't find the container with id 03f3e31be49d19853e49ed21aea00b3f08049a9c6c63b2bf60d80e96376f4282 Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.985669 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" event={"ID":"e7a6b0fa-1136-439f-a0bd-96f6855dec8f","Type":"ContainerStarted","Data":"37288893552a619b07edbed388a88ebb1501785b9a7db2c1b1f71f6c18185a2d"} Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.991472 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" event={"ID":"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9","Type":"ContainerStarted","Data":"e5d7afe05f01442f90fccd940b8dc6523772d5c5285c10d6f5047815cf09f253"} Dec 08 21:31:37 crc kubenswrapper[4912]: I1208 21:31:37.993055 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" event={"ID":"e9a7a69b-d12e-48e7-899f-2c919d23d906","Type":"ContainerStarted","Data":"b9045732ae1d24e73f5d760b483ded9f6bd08f4a3adbe7d0b9a16b5d7caaaf6b"} Dec 08 21:31:38 crc kubenswrapper[4912]: I1208 21:31:38.001458 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" event={"ID":"065174a4-9aac-46c1-b83c-71861f156ee3","Type":"ContainerStarted","Data":"03f3e31be49d19853e49ed21aea00b3f08049a9c6c63b2bf60d80e96376f4282"} Dec 08 21:31:38 crc kubenswrapper[4912]: I1208 21:31:38.439289 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:38 crc kubenswrapper[4912]: I1208 21:31:38.440009 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" Dec 08 21:31:38 crc kubenswrapper[4912]: I1208 21:31:38.850234 4912 scope.go:117] "RemoveContainer" containerID="67e1d80a9cddbf7dfa3f3cb1c5dc46e23094b74b2c2df2cebde82778208d8e83" Dec 08 21:31:38 crc kubenswrapper[4912]: I1208 21:31:38.926881 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6"] Dec 08 21:31:38 crc kubenswrapper[4912]: W1208 21:31:38.994559 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b4799e4_efd5_4f47_b53b_a056d4a3d046.slice/crio-e565b064d26a168962c17681a1072b9de3b3093611861ce4d2084e5823f6c295 WatchSource:0}: Error finding container e565b064d26a168962c17681a1072b9de3b3093611861ce4d2084e5823f6c295: Status 404 returned error can't find the container with id e565b064d26a168962c17681a1072b9de3b3093611861ce4d2084e5823f6c295 Dec 08 21:31:39 crc kubenswrapper[4912]: I1208 21:31:39.014658 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-rp5rf_959add28-5508-49d7-8fe3-404acef398b0/kube-multus/2.log" Dec 08 21:31:39 crc kubenswrapper[4912]: I1208 21:31:39.017151 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" event={"ID":"7b4799e4-efd5-4f47-b53b-a056d4a3d046","Type":"ContainerStarted","Data":"e565b064d26a168962c17681a1072b9de3b3093611861ce4d2084e5823f6c295"} Dec 08 21:31:41 crc kubenswrapper[4912]: I1208 21:31:41.575911 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mbt8h" Dec 08 21:32:01 crc kubenswrapper[4912]: E1208 21:32:01.887658 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 21:32:01 crc kubenswrapper[4912]: E1208 21:32:01.888873 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bpmfw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vpq5d_openshift-marketplace(90e4ab2d-73de-458b-adc0-ffb3b9f50de2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:32:01 crc kubenswrapper[4912]: E1208 21:32:01.890105 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vpq5d" podUID="90e4ab2d-73de-458b-adc0-ffb3b9f50de2" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.294589 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.294886 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_openshift-operators(e9a7a69b-d12e-48e7-899f-2c919d23d906): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.296159 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" podUID="e9a7a69b-d12e-48e7-899f-2c919d23d906" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.307083 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.307357 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_openshift-operators(7b4799e4-efd5-4f47-b53b-a056d4a3d046): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.308651 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" podUID="7b4799e4-efd5-4f47-b53b-a056d4a3d046" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.362013 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" podUID="7b4799e4-efd5-4f47-b53b-a056d4a3d046" Dec 08 21:32:02 crc kubenswrapper[4912]: E1208 21:32:02.362311 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" podUID="e9a7a69b-d12e-48e7-899f-2c919d23d906" Dec 08 21:32:02 crc kubenswrapper[4912]: I1208 21:32:02.965882 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:32:02 crc kubenswrapper[4912]: I1208 21:32:02.965964 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:32:03 crc kubenswrapper[4912]: E1208 21:32:03.163250 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vpq5d" podUID="90e4ab2d-73de-458b-adc0-ffb3b9f50de2" Dec 08 21:32:03 crc kubenswrapper[4912]: E1208 21:32:03.303022 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 08 21:32:03 crc kubenswrapper[4912]: E1208 21:32:03.303264 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpqtd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-7t6lg_openshift-operators(e7a6b0fa-1136-439f-a0bd-96f6855dec8f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:32:03 crc kubenswrapper[4912]: E1208 21:32:03.304514 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" podUID="e7a6b0fa-1136-439f-a0bd-96f6855dec8f" Dec 08 21:32:03 crc kubenswrapper[4912]: E1208 21:32:03.366891 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" podUID="e7a6b0fa-1136-439f-a0bd-96f6855dec8f" Dec 08 21:32:05 crc kubenswrapper[4912]: E1208 21:32:05.309874 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 08 21:32:05 crc kubenswrapper[4912]: E1208 21:32:05.312298 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8g76v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-gxbwp_openshift-operators(065174a4-9aac-46c1-b83c-71861f156ee3): ErrImagePull: rpc 
error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 21:32:05 crc kubenswrapper[4912]: E1208 21:32:05.313578 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" podUID="065174a4-9aac-46c1-b83c-71861f156ee3" Dec 08 21:32:05 crc kubenswrapper[4912]: E1208 21:32:05.385083 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" podUID="065174a4-9aac-46c1-b83c-71861f156ee3" Dec 08 21:32:06 crc kubenswrapper[4912]: I1208 21:32:06.393470 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" event={"ID":"b9c5dc1e-7823-4b2d-9983-8e23244bb2b9","Type":"ContainerStarted","Data":"d2edfd533e27d7e0926748ba35c4d8da92163393f7edb146af95e75a572a8dee"} Dec 08 21:32:06 crc kubenswrapper[4912]: I1208 21:32:06.423218 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" podStartSLOduration=17.799747115 podStartE2EDuration="45.423194149s" podCreationTimestamp="2025-12-08 21:31:21 +0000 UTC" firstStartedPulling="2025-12-08 21:31:37.693951385 +0000 UTC m=+779.556953468" lastFinishedPulling="2025-12-08 21:32:05.317398419 +0000 UTC m=+807.180400502" observedRunningTime="2025-12-08 21:32:06.420771771 +0000 UTC m=+808.283773854" watchObservedRunningTime="2025-12-08 21:32:06.423194149 +0000 UTC m=+808.286196232" Dec 08 21:32:07 crc kubenswrapper[4912]: I1208 21:32:07.398851 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:32:07 crc kubenswrapper[4912]: I1208 21:32:07.569144 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-rc26h" Dec 08 21:32:15 crc kubenswrapper[4912]: I1208 21:32:15.997334 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-btb4l"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:15.999604 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.001512 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wjwt6"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.002504 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wjwt6" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.003480 4912 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-726p4" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.005387 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.008928 4912 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-cngnf" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.010580 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-57j6j"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.011743 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.014952 4912 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-8kvq5" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.022571 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-btb4l"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.026508 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.030427 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wjwt6"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.042574 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-57j6j"] Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.156431 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z4n6\" (UniqueName: \"kubernetes.io/projected/a7bc3095-bbd0-4351-aca3-8537df3c82a1-kube-api-access-7z4n6\") pod \"cert-manager-webhook-5655c58dd6-btb4l\" (UID: \"a7bc3095-bbd0-4351-aca3-8537df3c82a1\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.156855 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tbj2\" (UniqueName: \"kubernetes.io/projected/68ebf962-3731-4e13-87a4-c34dc485ab22-kube-api-access-5tbj2\") pod \"cert-manager-5b446d88c5-wjwt6\" (UID: \"68ebf962-3731-4e13-87a4-c34dc485ab22\") " pod="cert-manager/cert-manager-5b446d88c5-wjwt6" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.156989 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gxd2\" (UniqueName: \"kubernetes.io/projected/f8d9e945-baf1-49fc-9c67-c1005db9f615-kube-api-access-6gxd2\") pod \"cert-manager-cainjector-7f985d654d-57j6j\" (UID: \"f8d9e945-baf1-49fc-9c67-c1005db9f615\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.258601 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z4n6\" (UniqueName: \"kubernetes.io/projected/a7bc3095-bbd0-4351-aca3-8537df3c82a1-kube-api-access-7z4n6\") pod \"cert-manager-webhook-5655c58dd6-btb4l\" (UID: \"a7bc3095-bbd0-4351-aca3-8537df3c82a1\") " 
pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.258682 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tbj2\" (UniqueName: \"kubernetes.io/projected/68ebf962-3731-4e13-87a4-c34dc485ab22-kube-api-access-5tbj2\") pod \"cert-manager-5b446d88c5-wjwt6\" (UID: \"68ebf962-3731-4e13-87a4-c34dc485ab22\") " pod="cert-manager/cert-manager-5b446d88c5-wjwt6" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.258723 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gxd2\" (UniqueName: \"kubernetes.io/projected/f8d9e945-baf1-49fc-9c67-c1005db9f615-kube-api-access-6gxd2\") pod \"cert-manager-cainjector-7f985d654d-57j6j\" (UID: \"f8d9e945-baf1-49fc-9c67-c1005db9f615\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.289261 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gxd2\" (UniqueName: \"kubernetes.io/projected/f8d9e945-baf1-49fc-9c67-c1005db9f615-kube-api-access-6gxd2\") pod \"cert-manager-cainjector-7f985d654d-57j6j\" (UID: \"f8d9e945-baf1-49fc-9c67-c1005db9f615\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.292007 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z4n6\" (UniqueName: \"kubernetes.io/projected/a7bc3095-bbd0-4351-aca3-8537df3c82a1-kube-api-access-7z4n6\") pod \"cert-manager-webhook-5655c58dd6-btb4l\" (UID: \"a7bc3095-bbd0-4351-aca3-8537df3c82a1\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.292105 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tbj2\" (UniqueName: \"kubernetes.io/projected/68ebf962-3731-4e13-87a4-c34dc485ab22-kube-api-access-5tbj2\") pod \"cert-manager-5b446d88c5-wjwt6\" (UID: \"68ebf962-3731-4e13-87a4-c34dc485ab22\") " pod="cert-manager/cert-manager-5b446d88c5-wjwt6" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.330486 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.341240 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wjwt6" Dec 08 21:32:16 crc kubenswrapper[4912]: I1208 21:32:16.349769 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" Dec 08 21:32:17 crc kubenswrapper[4912]: I1208 21:32:17.432391 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-57j6j"] Dec 08 21:32:17 crc kubenswrapper[4912]: I1208 21:32:17.458851 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" event={"ID":"f8d9e945-baf1-49fc-9c67-c1005db9f615","Type":"ContainerStarted","Data":"f56d36261f6dcfaa730632dcac46421b2f6853feff02ea21cbe48c4bac513ba1"} Dec 08 21:32:17 crc kubenswrapper[4912]: I1208 21:32:17.554457 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-btb4l"] Dec 08 21:32:17 crc kubenswrapper[4912]: W1208 21:32:17.556560 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7bc3095_bbd0_4351_aca3_8537df3c82a1.slice/crio-27abd10f51a2f248038aa878a97726987d82c3c853c85422c2cabd4d4fb902f5 WatchSource:0}: Error finding container 27abd10f51a2f248038aa878a97726987d82c3c853c85422c2cabd4d4fb902f5: Status 404 returned error can't find the container with id 27abd10f51a2f248038aa878a97726987d82c3c853c85422c2cabd4d4fb902f5 Dec 08 21:32:17 crc kubenswrapper[4912]: I1208 21:32:17.568142 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wjwt6"] Dec 08 21:32:17 crc kubenswrapper[4912]: W1208 21:32:17.573934 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68ebf962_3731_4e13_87a4_c34dc485ab22.slice/crio-151a085873b43078da8a93d759401ec12d5487b1b66016127f7554890d8df4ba WatchSource:0}: Error finding container 151a085873b43078da8a93d759401ec12d5487b1b66016127f7554890d8df4ba: Status 404 returned error can't find the container with id 151a085873b43078da8a93d759401ec12d5487b1b66016127f7554890d8df4ba Dec 08 21:32:18 crc kubenswrapper[4912]: I1208 21:32:18.466806 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wjwt6" event={"ID":"68ebf962-3731-4e13-87a4-c34dc485ab22","Type":"ContainerStarted","Data":"151a085873b43078da8a93d759401ec12d5487b1b66016127f7554890d8df4ba"} Dec 08 21:32:18 crc kubenswrapper[4912]: I1208 21:32:18.469063 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" event={"ID":"a7bc3095-bbd0-4351-aca3-8537df3c82a1","Type":"ContainerStarted","Data":"27abd10f51a2f248038aa878a97726987d82c3c853c85422c2cabd4d4fb902f5"} Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.518692 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" event={"ID":"065174a4-9aac-46c1-b83c-71861f156ee3","Type":"ContainerStarted","Data":"91f3dc3448f69f2fea39861c143b59437fe1e0b236c8e811919bcb2eaa24b6a5"} Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.522610 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" event={"ID":"e9a7a69b-d12e-48e7-899f-2c919d23d906","Type":"ContainerStarted","Data":"bebbc818e203efc791d4eee7ef7af568ec283379dc35fe2cad534af6bd7e7d19"} Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.525219 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" event={"ID":"7b4799e4-efd5-4f47-b53b-a056d4a3d046","Type":"ContainerStarted","Data":"ac77d551096a720fe0ba10c8e0f2693e8a605f9d62c2ba549485cd9ab539485b"} Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.542530 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-gxbwp" podStartSLOduration=20.020409589 podStartE2EDuration="1m6.542511014s" podCreationTimestamp="2025-12-08 21:31:20 +0000 UTC" firstStartedPulling="2025-12-08 21:31:37.893216546 +0000 UTC m=+779.756218629" lastFinishedPulling="2025-12-08 21:32:24.415317971 +0000 UTC m=+826.278320054" observedRunningTime="2025-12-08 21:32:26.538733284 +0000 UTC m=+828.401735387" watchObservedRunningTime="2025-12-08 21:32:26.542511014 +0000 UTC m=+828.405513097" Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.565889 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6" podStartSLOduration=20.398319052 podStartE2EDuration="1m5.565863182s" podCreationTimestamp="2025-12-08 21:31:21 +0000 UTC" firstStartedPulling="2025-12-08 21:31:39.001140727 +0000 UTC m=+780.864142810" lastFinishedPulling="2025-12-08 21:32:24.168684857 +0000 UTC m=+826.031686940" observedRunningTime="2025-12-08 21:32:26.561532678 +0000 UTC m=+828.424534751" watchObservedRunningTime="2025-12-08 21:32:26.565863182 +0000 UTC m=+828.428865265" Dec 08 21:32:26 crc kubenswrapper[4912]: I1208 21:32:26.602768 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-785d46d47d-wgntc" podStartSLOduration=19.189941952 podStartE2EDuration="1m5.602751134s" podCreationTimestamp="2025-12-08 21:31:21 +0000 UTC" firstStartedPulling="2025-12-08 21:31:37.754764418 +0000 UTC m=+779.617766501" lastFinishedPulling="2025-12-08 21:32:24.1675736 +0000 UTC m=+826.030575683" observedRunningTime="2025-12-08 21:32:26.599809443 +0000 UTC m=+828.462811526" watchObservedRunningTime="2025-12-08 21:32:26.602751134 +0000 UTC m=+828.465753207" Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.534844 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" event={"ID":"e7a6b0fa-1136-439f-a0bd-96f6855dec8f","Type":"ContainerStarted","Data":"939e08e20b3e15301a58efe30c34b355fdd686a4171b62090c33cbcbd1b46d1f"} Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.536656 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" event={"ID":"f8d9e945-baf1-49fc-9c67-c1005db9f615","Type":"ContainerStarted","Data":"22e97e06021b73eb3aa3d9e797f870f5b3aa0a1c6e5eedb08116aa055df1dda8"} Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.539262 4912 generic.go:334] "Generic (PLEG): container finished" podID="90e4ab2d-73de-458b-adc0-ffb3b9f50de2" containerID="e7251ba4db58fe845984b6c5050372481f7bcb7b1f72d37c9eb87608c83b6904" exitCode=0 Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.539361 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpq5d" event={"ID":"90e4ab2d-73de-458b-adc0-ffb3b9f50de2","Type":"ContainerDied","Data":"e7251ba4db58fe845984b6c5050372481f7bcb7b1f72d37c9eb87608c83b6904"} Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.542535 4912 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" event={"ID":"a7bc3095-bbd0-4351-aca3-8537df3c82a1","Type":"ContainerStarted","Data":"ce359fd119c392be3927cfba8478d599f56eb10ed6d4c265aafdc5a1e620bb35"} Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.543103 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.545642 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wjwt6" event={"ID":"68ebf962-3731-4e13-87a4-c34dc485ab22","Type":"ContainerStarted","Data":"09180ca5906ecf42e34e56dd9d37383f1c1c943d3131220696b26013292bba2a"} Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.593191 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" podStartSLOduration=18.970663651 podStartE2EDuration="1m6.593161646s" podCreationTimestamp="2025-12-08 21:31:21 +0000 UTC" firstStartedPulling="2025-12-08 21:31:37.67536749 +0000 UTC m=+779.538369573" lastFinishedPulling="2025-12-08 21:32:25.297865485 +0000 UTC m=+827.160867568" observedRunningTime="2025-12-08 21:32:27.568992458 +0000 UTC m=+829.431994541" watchObservedRunningTime="2025-12-08 21:32:27.593161646 +0000 UTC m=+829.456163729" Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.611378 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-wjwt6" podStartSLOduration=3.366794384 podStartE2EDuration="12.611351651s" podCreationTimestamp="2025-12-08 21:32:15 +0000 UTC" firstStartedPulling="2025-12-08 21:32:17.576780571 +0000 UTC m=+819.439782654" lastFinishedPulling="2025-12-08 21:32:26.821337838 +0000 UTC m=+828.684339921" observedRunningTime="2025-12-08 21:32:27.60879697 +0000 UTC m=+829.471799053" watchObservedRunningTime="2025-12-08 21:32:27.611351651 +0000 UTC m=+829.474353734" Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.632722 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-57j6j" podStartSLOduration=4.199914846 podStartE2EDuration="12.632700161s" podCreationTimestamp="2025-12-08 21:32:15 +0000 UTC" firstStartedPulling="2025-12-08 21:32:17.447505641 +0000 UTC m=+819.310507724" lastFinishedPulling="2025-12-08 21:32:25.880290956 +0000 UTC m=+827.743293039" observedRunningTime="2025-12-08 21:32:27.628680495 +0000 UTC m=+829.491682578" watchObservedRunningTime="2025-12-08 21:32:27.632700161 +0000 UTC m=+829.495702244" Dec 08 21:32:27 crc kubenswrapper[4912]: I1208 21:32:27.668306 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" podStartSLOduration=3.395890069 podStartE2EDuration="12.668268891s" podCreationTimestamp="2025-12-08 21:32:15 +0000 UTC" firstStartedPulling="2025-12-08 21:32:17.559007656 +0000 UTC m=+819.422009739" lastFinishedPulling="2025-12-08 21:32:26.831386458 +0000 UTC m=+828.694388561" observedRunningTime="2025-12-08 21:32:27.651332246 +0000 UTC m=+829.514334329" watchObservedRunningTime="2025-12-08 21:32:27.668268891 +0000 UTC m=+829.531270984" Dec 08 21:32:28 crc kubenswrapper[4912]: I1208 21:32:28.554813 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpq5d" 
event={"ID":"90e4ab2d-73de-458b-adc0-ffb3b9f50de2","Type":"ContainerStarted","Data":"fd979971d00d34752baf4e34b3390546274b2f69c6a0d7b67d2cded5f25a91c9"} Dec 08 21:32:28 crc kubenswrapper[4912]: I1208 21:32:28.577310 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vpq5d" podStartSLOduration=2.315457249 podStartE2EDuration="56.577286287s" podCreationTimestamp="2025-12-08 21:31:32 +0000 UTC" firstStartedPulling="2025-12-08 21:31:33.900384068 +0000 UTC m=+775.763386151" lastFinishedPulling="2025-12-08 21:32:28.162213106 +0000 UTC m=+830.025215189" observedRunningTime="2025-12-08 21:32:28.573211829 +0000 UTC m=+830.436213922" watchObservedRunningTime="2025-12-08 21:32:28.577286287 +0000 UTC m=+830.440288360" Dec 08 21:32:31 crc kubenswrapper[4912]: I1208 21:32:31.986530 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:32:31 crc kubenswrapper[4912]: I1208 21:32:31.988595 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-7t6lg" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.859089 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.859152 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.965131 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.965225 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.965331 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.966153 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:32:32 crc kubenswrapper[4912]: I1208 21:32:32.966236 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d" gracePeriod=600 Dec 08 21:32:33 crc kubenswrapper[4912]: I1208 21:32:33.913619 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vpq5d" podUID="90e4ab2d-73de-458b-adc0-ffb3b9f50de2" 
containerName="registry-server" probeResult="failure" output=< Dec 08 21:32:33 crc kubenswrapper[4912]: timeout: failed to connect service ":50051" within 1s Dec 08 21:32:33 crc kubenswrapper[4912]: > Dec 08 21:32:35 crc kubenswrapper[4912]: I1208 21:32:35.594562 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d" exitCode=0 Dec 08 21:32:35 crc kubenswrapper[4912]: I1208 21:32:35.594628 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d"} Dec 08 21:32:35 crc kubenswrapper[4912]: I1208 21:32:35.594665 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f"} Dec 08 21:32:35 crc kubenswrapper[4912]: I1208 21:32:35.594687 4912 scope.go:117] "RemoveContainer" containerID="6f5ad71830b88e4775860604df0b35d82fd83908839551688064bd6336508a17" Dec 08 21:32:36 crc kubenswrapper[4912]: I1208 21:32:36.336858 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-btb4l" Dec 08 21:32:42 crc kubenswrapper[4912]: I1208 21:32:42.964058 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.025959 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vpq5d" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.095731 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpq5d"] Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.195883 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.196199 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gc68w" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="registry-server" containerID="cri-o://a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c" gracePeriod=2 Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.623715 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.647057 4912 generic.go:334] "Generic (PLEG): container finished" podID="66f84380-1e3e-4023-b2be-78c959851aef" containerID="a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c" exitCode=0 Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.647146 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gc68w" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.647179 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerDied","Data":"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c"} Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.647248 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gc68w" event={"ID":"66f84380-1e3e-4023-b2be-78c959851aef","Type":"ContainerDied","Data":"045234994c8ab93378e33b01e2408fc1ae712985e42ca690af1b9fcf591cee1c"} Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.647276 4912 scope.go:117] "RemoveContainer" containerID="a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.671996 4912 scope.go:117] "RemoveContainer" containerID="80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.691149 4912 scope.go:117] "RemoveContainer" containerID="ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.716578 4912 scope.go:117] "RemoveContainer" containerID="a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c" Dec 08 21:32:43 crc kubenswrapper[4912]: E1208 21:32:43.717193 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c\": container with ID starting with a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c not found: ID does not exist" containerID="a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.717234 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c"} err="failed to get container status \"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c\": rpc error: code = NotFound desc = could not find container \"a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c\": container with ID starting with a287a9cc5066e54f84215eec5d1646112ffadcd2072aed311dd69f9e75b4174c not found: ID does not exist" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.717269 4912 scope.go:117] "RemoveContainer" containerID="80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755" Dec 08 21:32:43 crc kubenswrapper[4912]: E1208 21:32:43.720732 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755\": container with ID starting with 80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755 not found: ID does not exist" containerID="80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.720805 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755"} err="failed to get container status \"80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755\": rpc error: code = NotFound desc = could not find container 
\"80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755\": container with ID starting with 80d8a17f73f1c097a23b97a4fd9503451850f7b3372d8b2bba142ad02f336755 not found: ID does not exist" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.720845 4912 scope.go:117] "RemoveContainer" containerID="ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b" Dec 08 21:32:43 crc kubenswrapper[4912]: E1208 21:32:43.721293 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b\": container with ID starting with ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b not found: ID does not exist" containerID="ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.721337 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b"} err="failed to get container status \"ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b\": rpc error: code = NotFound desc = could not find container \"ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b\": container with ID starting with ff70af9a961c6f4823541629e5551a1435849f7e0603222d8bfaac5a54f1083b not found: ID does not exist" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.774949 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities\") pod \"66f84380-1e3e-4023-b2be-78c959851aef\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.775011 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ct2d\" (UniqueName: \"kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d\") pod \"66f84380-1e3e-4023-b2be-78c959851aef\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.775109 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content\") pod \"66f84380-1e3e-4023-b2be-78c959851aef\" (UID: \"66f84380-1e3e-4023-b2be-78c959851aef\") " Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.776990 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities" (OuterVolumeSpecName: "utilities") pod "66f84380-1e3e-4023-b2be-78c959851aef" (UID: "66f84380-1e3e-4023-b2be-78c959851aef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.782465 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d" (OuterVolumeSpecName: "kube-api-access-6ct2d") pod "66f84380-1e3e-4023-b2be-78c959851aef" (UID: "66f84380-1e3e-4023-b2be-78c959851aef"). InnerVolumeSpecName "kube-api-access-6ct2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.877004 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.877064 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ct2d\" (UniqueName: \"kubernetes.io/projected/66f84380-1e3e-4023-b2be-78c959851aef-kube-api-access-6ct2d\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.901745 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66f84380-1e3e-4023-b2be-78c959851aef" (UID: "66f84380-1e3e-4023-b2be-78c959851aef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.978466 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f84380-1e3e-4023-b2be-78c959851aef-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.982423 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:32:43 crc kubenswrapper[4912]: I1208 21:32:43.990243 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gc68w"] Dec 08 21:32:44 crc kubenswrapper[4912]: I1208 21:32:44.435277 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f84380-1e3e-4023-b2be-78c959851aef" path="/var/lib/kubelet/pods/66f84380-1e3e-4023-b2be-78c959851aef/volumes" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.150069 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl"] Dec 08 21:33:01 crc kubenswrapper[4912]: E1208 21:33:01.151300 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="extract-content" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.151319 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="extract-content" Dec 08 21:33:01 crc kubenswrapper[4912]: E1208 21:33:01.151344 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="registry-server" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.151352 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="registry-server" Dec 08 21:33:01 crc kubenswrapper[4912]: E1208 21:33:01.151387 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="extract-utilities" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.151395 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="extract-utilities" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.151548 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f84380-1e3e-4023-b2be-78c959851aef" containerName="registry-server" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 
21:33:01.152652 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.155237 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.167066 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.167138 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvglv\" (UniqueName: \"kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.167211 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.172714 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl"] Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.269301 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvglv\" (UniqueName: \"kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.269714 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.269778 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.270418 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"util\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.270475 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.291996 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvglv\" (UniqueName: \"kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.472665 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:01 crc kubenswrapper[4912]: I1208 21:33:01.814374 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl"] Dec 08 21:33:02 crc kubenswrapper[4912]: I1208 21:33:02.789264 4912 generic.go:334] "Generic (PLEG): container finished" podID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerID="01ea4720b5031e9daf867d214595a9ed731af8a6ac028149ea56eeee4b3ddcfa" exitCode=0 Dec 08 21:33:02 crc kubenswrapper[4912]: I1208 21:33:02.789396 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" event={"ID":"23c5450e-4908-4bb9-a24f-09b8016d6b0a","Type":"ContainerDied","Data":"01ea4720b5031e9daf867d214595a9ed731af8a6ac028149ea56eeee4b3ddcfa"} Dec 08 21:33:02 crc kubenswrapper[4912]: I1208 21:33:02.789645 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" event={"ID":"23c5450e-4908-4bb9-a24f-09b8016d6b0a","Type":"ContainerStarted","Data":"289f76a5a63399d1215b4307906243c7bfd21e5ad961d994b05f7ce188f2acf6"} Dec 08 21:33:03 crc kubenswrapper[4912]: I1208 21:33:03.994886 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 08 21:33:03 crc kubenswrapper[4912]: I1208 21:33:03.996500 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 08 21:33:03 crc kubenswrapper[4912]: I1208 21:33:03.998950 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.000354 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.006504 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.018012 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.018141 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq4fm\" (UniqueName: \"kubernetes.io/projected/abf2c0f0-6005-49df-b10f-03839679dd92-kube-api-access-jq4fm\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.119765 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq4fm\" (UniqueName: \"kubernetes.io/projected/abf2c0f0-6005-49df-b10f-03839679dd92-kube-api-access-jq4fm\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.119873 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.122495 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.122576 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/81baa2093bd5f3b25664ea8c14ed60224d7264e0f2aa46759521e1128ac00e71/globalmount\"" pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.146291 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-526f8d98-6905-4dd8-ba70-8884c38c22bb\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.147878 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq4fm\" (UniqueName: \"kubernetes.io/projected/abf2c0f0-6005-49df-b10f-03839679dd92-kube-api-access-jq4fm\") pod \"minio\" (UID: \"abf2c0f0-6005-49df-b10f-03839679dd92\") " pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.314631 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.538597 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 08 21:33:04 crc kubenswrapper[4912]: W1208 21:33:04.544693 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabf2c0f0_6005_49df_b10f_03839679dd92.slice/crio-27972c646b9a6fa6d81cf6ef9d6ee2791401b6cfcac6e5500dafa18e0a2f3c7d WatchSource:0}: Error finding container 27972c646b9a6fa6d81cf6ef9d6ee2791401b6cfcac6e5500dafa18e0a2f3c7d: Status 404 returned error can't find the container with id 27972c646b9a6fa6d81cf6ef9d6ee2791401b6cfcac6e5500dafa18e0a2f3c7d Dec 08 21:33:04 crc kubenswrapper[4912]: I1208 21:33:04.817415 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"abf2c0f0-6005-49df-b10f-03839679dd92","Type":"ContainerStarted","Data":"27972c646b9a6fa6d81cf6ef9d6ee2791401b6cfcac6e5500dafa18e0a2f3c7d"} Dec 08 21:33:09 crc kubenswrapper[4912]: I1208 21:33:09.852631 4912 generic.go:334] "Generic (PLEG): container finished" podID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerID="2d9883eb10a930ed60b8c8b8ec44fd75373b4ad359fad52110976c1cf1bdc731" exitCode=0 Dec 08 21:33:09 crc kubenswrapper[4912]: I1208 21:33:09.852693 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" event={"ID":"23c5450e-4908-4bb9-a24f-09b8016d6b0a","Type":"ContainerDied","Data":"2d9883eb10a930ed60b8c8b8ec44fd75373b4ad359fad52110976c1cf1bdc731"} Dec 08 21:33:10 crc kubenswrapper[4912]: I1208 21:33:10.864673 4912 generic.go:334] "Generic (PLEG): container finished" podID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerID="5e490d833ad6d192966cad89108549011b00f77aa85f496b1cb7a9180c93475e" exitCode=0 Dec 08 21:33:10 crc kubenswrapper[4912]: I1208 21:33:10.864854 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" 
event={"ID":"23c5450e-4908-4bb9-a24f-09b8016d6b0a","Type":"ContainerDied","Data":"5e490d833ad6d192966cad89108549011b00f77aa85f496b1cb7a9180c93475e"} Dec 08 21:33:13 crc kubenswrapper[4912]: I1208 21:33:13.875666 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:13 crc kubenswrapper[4912]: I1208 21:33:13.889396 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" event={"ID":"23c5450e-4908-4bb9-a24f-09b8016d6b0a","Type":"ContainerDied","Data":"289f76a5a63399d1215b4307906243c7bfd21e5ad961d994b05f7ce188f2acf6"} Dec 08 21:33:13 crc kubenswrapper[4912]: I1208 21:33:13.889463 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="289f76a5a63399d1215b4307906243c7bfd21e5ad961d994b05f7ce188f2acf6" Dec 08 21:33:13 crc kubenswrapper[4912]: I1208 21:33:13.889559 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.074227 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvglv\" (UniqueName: \"kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv\") pod \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.074293 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle\") pod \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.074352 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util\") pod \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\" (UID: \"23c5450e-4908-4bb9-a24f-09b8016d6b0a\") " Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.075549 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle" (OuterVolumeSpecName: "bundle") pod "23c5450e-4908-4bb9-a24f-09b8016d6b0a" (UID: "23c5450e-4908-4bb9-a24f-09b8016d6b0a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.080154 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv" (OuterVolumeSpecName: "kube-api-access-pvglv") pod "23c5450e-4908-4bb9-a24f-09b8016d6b0a" (UID: "23c5450e-4908-4bb9-a24f-09b8016d6b0a"). InnerVolumeSpecName "kube-api-access-pvglv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.088470 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util" (OuterVolumeSpecName: "util") pod "23c5450e-4908-4bb9-a24f-09b8016d6b0a" (UID: "23c5450e-4908-4bb9-a24f-09b8016d6b0a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.175833 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvglv\" (UniqueName: \"kubernetes.io/projected/23c5450e-4908-4bb9-a24f-09b8016d6b0a-kube-api-access-pvglv\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.175871 4912 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:14 crc kubenswrapper[4912]: I1208 21:33:14.175882 4912 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/23c5450e-4908-4bb9-a24f-09b8016d6b0a-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:16 crc kubenswrapper[4912]: I1208 21:33:16.911790 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"abf2c0f0-6005-49df-b10f-03839679dd92","Type":"ContainerStarted","Data":"9771c4e06df6b09cd3ccb7dd2882358ba4819eadc1380eb43ff652e2e8f8cae0"} Dec 08 21:33:16 crc kubenswrapper[4912]: I1208 21:33:16.935571 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=3.818563226 podStartE2EDuration="15.935549567s" podCreationTimestamp="2025-12-08 21:33:01 +0000 UTC" firstStartedPulling="2025-12-08 21:33:04.547130349 +0000 UTC m=+866.410132432" lastFinishedPulling="2025-12-08 21:33:16.6641167 +0000 UTC m=+878.527118773" observedRunningTime="2025-12-08 21:33:16.928340165 +0000 UTC m=+878.791342248" watchObservedRunningTime="2025-12-08 21:33:16.935549567 +0000 UTC m=+878.798551660" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.365804 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2"] Dec 08 21:33:20 crc kubenswrapper[4912]: E1208 21:33:20.366812 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="pull" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.366832 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="pull" Dec 08 21:33:20 crc kubenswrapper[4912]: E1208 21:33:20.366846 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="extract" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.366856 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="extract" Dec 08 21:33:20 crc kubenswrapper[4912]: E1208 21:33:20.366872 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="util" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.366878 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="util" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.367024 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c5450e-4908-4bb9-a24f-09b8016d6b0a" containerName="extract" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.367887 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.371228 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-lmslx" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.371373 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.371453 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.371778 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.371788 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.374010 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.379861 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2"] Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.466584 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-webhook-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.466939 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.467263 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-apiservice-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.467454 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x76g9\" (UniqueName: \"kubernetes.io/projected/d70194be-442d-46e2-84c6-6572c73fb5a8-kube-api-access-x76g9\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.467629 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" 
(UniqueName: \"kubernetes.io/configmap/d70194be-442d-46e2-84c6-6572c73fb5a8-manager-config\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.569019 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-apiservice-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.569208 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x76g9\" (UniqueName: \"kubernetes.io/projected/d70194be-442d-46e2-84c6-6572c73fb5a8-kube-api-access-x76g9\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.569266 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/d70194be-442d-46e2-84c6-6572c73fb5a8-manager-config\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.569326 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-webhook-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.569361 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.570504 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/d70194be-442d-46e2-84c6-6572c73fb5a8-manager-config\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.574976 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-apiservice-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.582855 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.591678 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d70194be-442d-46e2-84c6-6572c73fb5a8-webhook-cert\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.615700 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x76g9\" (UniqueName: \"kubernetes.io/projected/d70194be-442d-46e2-84c6-6572c73fb5a8-kube-api-access-x76g9\") pod \"loki-operator-controller-manager-74685c6454-9rqb2\" (UID: \"d70194be-442d-46e2-84c6-6572c73fb5a8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:20 crc kubenswrapper[4912]: I1208 21:33:20.685679 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:21 crc kubenswrapper[4912]: I1208 21:33:21.002444 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2"] Dec 08 21:33:21 crc kubenswrapper[4912]: W1208 21:33:21.015144 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd70194be_442d_46e2_84c6_6572c73fb5a8.slice/crio-41a23f6204e8a343abb62bb68c1c5b70503c7d0006c322d8272967a609908eb0 WatchSource:0}: Error finding container 41a23f6204e8a343abb62bb68c1c5b70503c7d0006c322d8272967a609908eb0: Status 404 returned error can't find the container with id 41a23f6204e8a343abb62bb68c1c5b70503c7d0006c322d8272967a609908eb0 Dec 08 21:33:22 crc kubenswrapper[4912]: I1208 21:33:21.949278 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" event={"ID":"d70194be-442d-46e2-84c6-6572c73fb5a8","Type":"ContainerStarted","Data":"41a23f6204e8a343abb62bb68c1c5b70503c7d0006c322d8272967a609908eb0"} Dec 08 21:33:32 crc kubenswrapper[4912]: I1208 21:33:32.015067 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" event={"ID":"d70194be-442d-46e2-84c6-6572c73fb5a8","Type":"ContainerStarted","Data":"d2afab5aa746515c27a9670b7aa881da0c857a0aaccb48fee3f67d6fe47d0450"} Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.201186 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.203149 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.213785 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.338572 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl82m\" (UniqueName: \"kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.338672 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.338936 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.440969 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.441073 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl82m\" (UniqueName: \"kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.441126 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.441824 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.442125 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.472096 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rl82m\" (UniqueName: \"kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m\") pod \"redhat-marketplace-lbd9q\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:34 crc kubenswrapper[4912]: I1208 21:33:34.525204 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:40 crc kubenswrapper[4912]: I1208 21:33:40.172417 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.074792 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" event={"ID":"d70194be-442d-46e2-84c6-6572c73fb5a8","Type":"ContainerStarted","Data":"28a75f6a63b8135adb06b97a39bfff71d574f93e0574073ddb4ca0feb64f4b1d"} Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.075280 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.076461 4912 generic.go:334] "Generic (PLEG): container finished" podID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerID="34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09" exitCode=0 Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.076498 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerDied","Data":"34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09"} Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.076516 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerStarted","Data":"8781c65fe8a2b6fc5d6de8837057c0716171b82375c0076b01f75802f1fac95b"} Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.077513 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" Dec 08 21:33:41 crc kubenswrapper[4912]: I1208 21:33:41.106959 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-74685c6454-9rqb2" podStartSLOduration=1.945280176 podStartE2EDuration="21.106934983s" podCreationTimestamp="2025-12-08 21:33:20 +0000 UTC" firstStartedPulling="2025-12-08 21:33:21.018670379 +0000 UTC m=+882.881672462" lastFinishedPulling="2025-12-08 21:33:40.180325186 +0000 UTC m=+902.043327269" observedRunningTime="2025-12-08 21:33:41.101167325 +0000 UTC m=+902.964169408" watchObservedRunningTime="2025-12-08 21:33:41.106934983 +0000 UTC m=+902.969937066" Dec 08 21:33:42 crc kubenswrapper[4912]: I1208 21:33:42.084752 4912 generic.go:334] "Generic (PLEG): container finished" podID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerID="472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819" exitCode=0 Dec 08 21:33:42 crc kubenswrapper[4912]: I1208 21:33:42.086178 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" 
event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerDied","Data":"472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819"} Dec 08 21:33:46 crc kubenswrapper[4912]: I1208 21:33:46.113529 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerStarted","Data":"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35"} Dec 08 21:33:47 crc kubenswrapper[4912]: I1208 21:33:47.145934 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lbd9q" podStartSLOduration=11.712249235 podStartE2EDuration="13.145909782s" podCreationTimestamp="2025-12-08 21:33:34 +0000 UTC" firstStartedPulling="2025-12-08 21:33:41.07879097 +0000 UTC m=+902.941793053" lastFinishedPulling="2025-12-08 21:33:42.512451517 +0000 UTC m=+904.375453600" observedRunningTime="2025-12-08 21:33:47.142186093 +0000 UTC m=+909.005188196" watchObservedRunningTime="2025-12-08 21:33:47.145909782 +0000 UTC m=+909.008911875" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.237626 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.241231 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.251198 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.366564 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.367165 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.367227 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skkfn\" (UniqueName: \"kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.468296 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.468378 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skkfn\" (UniqueName: \"kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn\") 
pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.468418 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.468948 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.469524 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.490067 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skkfn\" (UniqueName: \"kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn\") pod \"certified-operators-mqzbn\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:53 crc kubenswrapper[4912]: I1208 21:33:53.607279 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:33:54 crc kubenswrapper[4912]: I1208 21:33:54.105304 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:33:54 crc kubenswrapper[4912]: W1208 21:33:54.129988 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf51c77d_7f70_41b6_9a81_291c708ac980.slice/crio-5796db91a0fbc66d6a20a5ea81548e9816f6c29805935915dca86fa6fdfb6d46 WatchSource:0}: Error finding container 5796db91a0fbc66d6a20a5ea81548e9816f6c29805935915dca86fa6fdfb6d46: Status 404 returned error can't find the container with id 5796db91a0fbc66d6a20a5ea81548e9816f6c29805935915dca86fa6fdfb6d46 Dec 08 21:33:54 crc kubenswrapper[4912]: I1208 21:33:54.161385 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerStarted","Data":"5796db91a0fbc66d6a20a5ea81548e9816f6c29805935915dca86fa6fdfb6d46"} Dec 08 21:33:54 crc kubenswrapper[4912]: I1208 21:33:54.526096 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:54 crc kubenswrapper[4912]: I1208 21:33:54.527438 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:54 crc kubenswrapper[4912]: I1208 21:33:54.569427 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:55 crc kubenswrapper[4912]: I1208 21:33:55.172486 4912 generic.go:334] "Generic (PLEG): container finished" podID="df51c77d-7f70-41b6-9a81-291c708ac980" containerID="961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249" exitCode=0 Dec 08 21:33:55 crc kubenswrapper[4912]: I1208 21:33:55.172571 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerDied","Data":"961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249"} Dec 08 21:33:55 crc kubenswrapper[4912]: I1208 21:33:55.225901 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:56 crc kubenswrapper[4912]: I1208 21:33:56.184164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerStarted","Data":"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2"} Dec 08 21:33:56 crc kubenswrapper[4912]: I1208 21:33:56.800465 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:57 crc kubenswrapper[4912]: I1208 21:33:57.193068 4912 generic.go:334] "Generic (PLEG): container finished" podID="df51c77d-7f70-41b6-9a81-291c708ac980" containerID="1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2" exitCode=0 Dec 08 21:33:57 crc kubenswrapper[4912]: I1208 21:33:57.193226 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerDied","Data":"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2"} Dec 08 21:33:58 crc 
kubenswrapper[4912]: I1208 21:33:58.198896 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lbd9q" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="registry-server" containerID="cri-o://a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35" gracePeriod=2 Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.597187 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.657533 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl82m\" (UniqueName: \"kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m\") pod \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.657737 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities\") pod \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.657762 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content\") pod \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\" (UID: \"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde\") " Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.659450 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities" (OuterVolumeSpecName: "utilities") pod "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" (UID: "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.667280 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m" (OuterVolumeSpecName: "kube-api-access-rl82m") pod "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" (UID: "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde"). InnerVolumeSpecName "kube-api-access-rl82m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.685485 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" (UID: "f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.759951 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.760295 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:58 crc kubenswrapper[4912]: I1208 21:33:58.760310 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl82m\" (UniqueName: \"kubernetes.io/projected/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde-kube-api-access-rl82m\") on node \"crc\" DevicePath \"\"" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.212751 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerStarted","Data":"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762"} Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.215067 4912 generic.go:334] "Generic (PLEG): container finished" podID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerID="a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35" exitCode=0 Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.215143 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerDied","Data":"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35"} Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.215327 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbd9q" event={"ID":"f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde","Type":"ContainerDied","Data":"8781c65fe8a2b6fc5d6de8837057c0716171b82375c0076b01f75802f1fac95b"} Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.215428 4912 scope.go:117] "RemoveContainer" containerID="a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.215257 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbd9q" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.230611 4912 scope.go:117] "RemoveContainer" containerID="472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.242149 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mqzbn" podStartSLOduration=2.768077612 podStartE2EDuration="6.242134736s" podCreationTimestamp="2025-12-08 21:33:53 +0000 UTC" firstStartedPulling="2025-12-08 21:33:55.177839204 +0000 UTC m=+917.040841297" lastFinishedPulling="2025-12-08 21:33:58.651896338 +0000 UTC m=+920.514898421" observedRunningTime="2025-12-08 21:33:59.236822989 +0000 UTC m=+921.099825072" watchObservedRunningTime="2025-12-08 21:33:59.242134736 +0000 UTC m=+921.105136819" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.249507 4912 scope.go:117] "RemoveContainer" containerID="34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.257535 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.261930 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbd9q"] Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.264209 4912 scope.go:117] "RemoveContainer" containerID="a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35" Dec 08 21:33:59 crc kubenswrapper[4912]: E1208 21:33:59.264578 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35\": container with ID starting with a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35 not found: ID does not exist" containerID="a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.264621 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35"} err="failed to get container status \"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35\": rpc error: code = NotFound desc = could not find container \"a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35\": container with ID starting with a981fbd3eb355aa982cc19d70e68d1c82b544c33a28fa42a88520d0709730c35 not found: ID does not exist" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.264646 4912 scope.go:117] "RemoveContainer" containerID="472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819" Dec 08 21:33:59 crc kubenswrapper[4912]: E1208 21:33:59.264976 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819\": container with ID starting with 472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819 not found: ID does not exist" containerID="472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.264994 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819"} err="failed to get container 
status \"472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819\": rpc error: code = NotFound desc = could not find container \"472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819\": container with ID starting with 472201a707699d62a7bc07301a98fcbd401eb2145a42a76bcf3b475fb9dec819 not found: ID does not exist" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.265007 4912 scope.go:117] "RemoveContainer" containerID="34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09" Dec 08 21:33:59 crc kubenswrapper[4912]: E1208 21:33:59.265213 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09\": container with ID starting with 34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09 not found: ID does not exist" containerID="34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09" Dec 08 21:33:59 crc kubenswrapper[4912]: I1208 21:33:59.265233 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09"} err="failed to get container status \"34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09\": rpc error: code = NotFound desc = could not find container \"34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09\": container with ID starting with 34a517ad3cfbaa7b8c7ab3fa1a3d7c61494fc9042469abd3343dad3ac2eecb09 not found: ID does not exist" Dec 08 21:34:00 crc kubenswrapper[4912]: I1208 21:34:00.435516 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" path="/var/lib/kubelet/pods/f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde/volumes" Dec 08 21:34:03 crc kubenswrapper[4912]: I1208 21:34:03.608182 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:03 crc kubenswrapper[4912]: I1208 21:34:03.608519 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:03 crc kubenswrapper[4912]: I1208 21:34:03.658182 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:04 crc kubenswrapper[4912]: I1208 21:34:04.295138 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:05 crc kubenswrapper[4912]: I1208 21:34:05.806603 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.265216 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mqzbn" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="registry-server" containerID="cri-o://673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762" gracePeriod=2 Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.614446 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.673192 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities\") pod \"df51c77d-7f70-41b6-9a81-291c708ac980\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.673253 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content\") pod \"df51c77d-7f70-41b6-9a81-291c708ac980\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.673378 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skkfn\" (UniqueName: \"kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn\") pod \"df51c77d-7f70-41b6-9a81-291c708ac980\" (UID: \"df51c77d-7f70-41b6-9a81-291c708ac980\") " Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.673854 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities" (OuterVolumeSpecName: "utilities") pod "df51c77d-7f70-41b6-9a81-291c708ac980" (UID: "df51c77d-7f70-41b6-9a81-291c708ac980"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.674633 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.678608 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn" (OuterVolumeSpecName: "kube-api-access-skkfn") pod "df51c77d-7f70-41b6-9a81-291c708ac980" (UID: "df51c77d-7f70-41b6-9a81-291c708ac980"). InnerVolumeSpecName "kube-api-access-skkfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:34:06 crc kubenswrapper[4912]: I1208 21:34:06.775983 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skkfn\" (UniqueName: \"kubernetes.io/projected/df51c77d-7f70-41b6-9a81-291c708ac980-kube-api-access-skkfn\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.069499 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df51c77d-7f70-41b6-9a81-291c708ac980" (UID: "df51c77d-7f70-41b6-9a81-291c708ac980"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.079657 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df51c77d-7f70-41b6-9a81-291c708ac980-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.271495 4912 generic.go:334] "Generic (PLEG): container finished" podID="df51c77d-7f70-41b6-9a81-291c708ac980" containerID="673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762" exitCode=0 Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.271539 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerDied","Data":"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762"} Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.271584 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqzbn" event={"ID":"df51c77d-7f70-41b6-9a81-291c708ac980","Type":"ContainerDied","Data":"5796db91a0fbc66d6a20a5ea81548e9816f6c29805935915dca86fa6fdfb6d46"} Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.271611 4912 scope.go:117] "RemoveContainer" containerID="673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.271551 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqzbn" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.294083 4912 scope.go:117] "RemoveContainer" containerID="1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.304951 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.310094 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mqzbn"] Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.326080 4912 scope.go:117] "RemoveContainer" containerID="961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.346386 4912 scope.go:117] "RemoveContainer" containerID="673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762" Dec 08 21:34:07 crc kubenswrapper[4912]: E1208 21:34:07.346790 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762\": container with ID starting with 673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762 not found: ID does not exist" containerID="673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.346823 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762"} err="failed to get container status \"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762\": rpc error: code = NotFound desc = could not find container \"673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762\": container with ID starting with 673c8a674a3c4923a9ad2a5a6716ce02971e9edf5da09d385e0c352e08479762 not found: ID does not exist" Dec 08 
21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.346850 4912 scope.go:117] "RemoveContainer" containerID="1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2" Dec 08 21:34:07 crc kubenswrapper[4912]: E1208 21:34:07.347323 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2\": container with ID starting with 1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2 not found: ID does not exist" containerID="1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.347343 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2"} err="failed to get container status \"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2\": rpc error: code = NotFound desc = could not find container \"1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2\": container with ID starting with 1f815d18247247a9855c576a13749a1e9331a2931e2224759f1a7b1a28b60da2 not found: ID does not exist" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.347355 4912 scope.go:117] "RemoveContainer" containerID="961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249" Dec 08 21:34:07 crc kubenswrapper[4912]: E1208 21:34:07.347559 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249\": container with ID starting with 961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249 not found: ID does not exist" containerID="961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249" Dec 08 21:34:07 crc kubenswrapper[4912]: I1208 21:34:07.347578 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249"} err="failed to get container status \"961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249\": rpc error: code = NotFound desc = could not find container \"961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249\": container with ID starting with 961c8fecfd01cd3720609bd34fe73e2f26b462edf53be78a248893cc076bf249 not found: ID does not exist" Dec 08 21:34:08 crc kubenswrapper[4912]: I1208 21:34:08.434099 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" path="/var/lib/kubelet/pods/df51c77d-7f70-41b6-9a81-291c708ac980/volumes" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.086156 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg"] Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.086922 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="registry-server" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.086934 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="registry-server" Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.086942 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="registry-server" Dec 08 21:34:22 crc 
kubenswrapper[4912]: I1208 21:34:22.086948 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="registry-server" Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.086959 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="extract-content" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.086965 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="extract-content" Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.086976 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="extract-content" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.086981 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="extract-content" Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.086990 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="extract-utilities" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.086996 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="extract-utilities" Dec 08 21:34:22 crc kubenswrapper[4912]: E1208 21:34:22.087010 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="extract-utilities" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.087015 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="extract-utilities" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.087138 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f10ce6ff-4d7f-4bf0-8ebd-374b924d8dde" containerName="registry-server" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.087154 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="df51c77d-7f70-41b6-9a81-291c708ac980" containerName="registry-server" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.087958 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.090483 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.108007 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg"] Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.205874 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.206117 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.206214 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7fdm\" (UniqueName: \"kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.307003 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.307091 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7fdm\" (UniqueName: \"kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.307149 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.307619 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.307642 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.328858 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7fdm\" (UniqueName: \"kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.412477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:22 crc kubenswrapper[4912]: I1208 21:34:22.817839 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg"] Dec 08 21:34:23 crc kubenswrapper[4912]: I1208 21:34:23.376238 4912 generic.go:334] "Generic (PLEG): container finished" podID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerID="0fcc585fa4a1082b8a2ef72fa5afbf22fee325d5e3b69ff86c89a6dbacc138c0" exitCode=0 Dec 08 21:34:23 crc kubenswrapper[4912]: I1208 21:34:23.376319 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" event={"ID":"98eb7224-c5a5-46c2-8b6c-c515e010fb28","Type":"ContainerDied","Data":"0fcc585fa4a1082b8a2ef72fa5afbf22fee325d5e3b69ff86c89a6dbacc138c0"} Dec 08 21:34:23 crc kubenswrapper[4912]: I1208 21:34:23.376548 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" event={"ID":"98eb7224-c5a5-46c2-8b6c-c515e010fb28","Type":"ContainerStarted","Data":"9e61d076d8efe2ccd68b6fc29da17e2d97d2e330a97ec616012d9304f13646c4"} Dec 08 21:34:25 crc kubenswrapper[4912]: I1208 21:34:25.393442 4912 generic.go:334] "Generic (PLEG): container finished" podID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerID="fe2c4c58c5584b77b18e3a6fad3154c1227dfb94567e0829166cd9597adc60d6" exitCode=0 Dec 08 21:34:25 crc kubenswrapper[4912]: I1208 21:34:25.393575 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" event={"ID":"98eb7224-c5a5-46c2-8b6c-c515e010fb28","Type":"ContainerDied","Data":"fe2c4c58c5584b77b18e3a6fad3154c1227dfb94567e0829166cd9597adc60d6"} Dec 08 21:34:26 crc kubenswrapper[4912]: I1208 21:34:26.401484 4912 generic.go:334] "Generic (PLEG): container finished" podID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerID="52b91a7b02442dffcc5567a2d4989e906468d07db5e1e40b16eaeafd609233ee" exitCode=0 Dec 08 21:34:26 crc kubenswrapper[4912]: I1208 
21:34:26.401714 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" event={"ID":"98eb7224-c5a5-46c2-8b6c-c515e010fb28","Type":"ContainerDied","Data":"52b91a7b02442dffcc5567a2d4989e906468d07db5e1e40b16eaeafd609233ee"} Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.699123 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.885064 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle\") pod \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.885129 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7fdm\" (UniqueName: \"kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm\") pod \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.885162 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util\") pod \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\" (UID: \"98eb7224-c5a5-46c2-8b6c-c515e010fb28\") " Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.885689 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle" (OuterVolumeSpecName: "bundle") pod "98eb7224-c5a5-46c2-8b6c-c515e010fb28" (UID: "98eb7224-c5a5-46c2-8b6c-c515e010fb28"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.891767 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm" (OuterVolumeSpecName: "kube-api-access-j7fdm") pod "98eb7224-c5a5-46c2-8b6c-c515e010fb28" (UID: "98eb7224-c5a5-46c2-8b6c-c515e010fb28"). InnerVolumeSpecName "kube-api-access-j7fdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.899849 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util" (OuterVolumeSpecName: "util") pod "98eb7224-c5a5-46c2-8b6c-c515e010fb28" (UID: "98eb7224-c5a5-46c2-8b6c-c515e010fb28"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.986902 4912 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.986951 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7fdm\" (UniqueName: \"kubernetes.io/projected/98eb7224-c5a5-46c2-8b6c-c515e010fb28-kube-api-access-j7fdm\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:27 crc kubenswrapper[4912]: I1208 21:34:27.986963 4912 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98eb7224-c5a5-46c2-8b6c-c515e010fb28-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:28 crc kubenswrapper[4912]: I1208 21:34:28.416335 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" event={"ID":"98eb7224-c5a5-46c2-8b6c-c515e010fb28","Type":"ContainerDied","Data":"9e61d076d8efe2ccd68b6fc29da17e2d97d2e330a97ec616012d9304f13646c4"} Dec 08 21:34:28 crc kubenswrapper[4912]: I1208 21:34:28.416374 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e61d076d8efe2ccd68b6fc29da17e2d97d2e330a97ec616012d9304f13646c4" Dec 08 21:34:28 crc kubenswrapper[4912]: I1208 21:34:28.416436 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.874886 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z"] Dec 08 21:34:30 crc kubenswrapper[4912]: E1208 21:34:30.875480 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="pull" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.875495 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="pull" Dec 08 21:34:30 crc kubenswrapper[4912]: E1208 21:34:30.875521 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="extract" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.875529 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="extract" Dec 08 21:34:30 crc kubenswrapper[4912]: E1208 21:34:30.875543 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="util" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.875551 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="util" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.875681 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="98eb7224-c5a5-46c2-8b6c-c515e010fb28" containerName="extract" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.876307 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.878282 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-94s9n" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.878374 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.879628 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 08 21:34:30 crc kubenswrapper[4912]: I1208 21:34:30.889715 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z"] Dec 08 21:34:31 crc kubenswrapper[4912]: I1208 21:34:31.027882 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvd8p\" (UniqueName: \"kubernetes.io/projected/7dcc5a2b-b055-4bdd-903e-ec9772a7877e-kube-api-access-kvd8p\") pod \"nmstate-operator-5b5b58f5c8-29q7z\" (UID: \"7dcc5a2b-b055-4bdd-903e-ec9772a7877e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" Dec 08 21:34:31 crc kubenswrapper[4912]: I1208 21:34:31.129806 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvd8p\" (UniqueName: \"kubernetes.io/projected/7dcc5a2b-b055-4bdd-903e-ec9772a7877e-kube-api-access-kvd8p\") pod \"nmstate-operator-5b5b58f5c8-29q7z\" (UID: \"7dcc5a2b-b055-4bdd-903e-ec9772a7877e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" Dec 08 21:34:31 crc kubenswrapper[4912]: I1208 21:34:31.148512 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvd8p\" (UniqueName: \"kubernetes.io/projected/7dcc5a2b-b055-4bdd-903e-ec9772a7877e-kube-api-access-kvd8p\") pod \"nmstate-operator-5b5b58f5c8-29q7z\" (UID: \"7dcc5a2b-b055-4bdd-903e-ec9772a7877e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" Dec 08 21:34:31 crc kubenswrapper[4912]: I1208 21:34:31.195296 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" Dec 08 21:34:31 crc kubenswrapper[4912]: I1208 21:34:31.439587 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z"] Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.446327 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" event={"ID":"7dcc5a2b-b055-4bdd-903e-ec9772a7877e","Type":"ContainerStarted","Data":"71f6471febf6d5a01b834d199df91e5f3d0360b98ace503a5d45231330d48e89"} Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.646641 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.647882 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.670865 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.750453 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.750561 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjzds\" (UniqueName: \"kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.750764 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.852819 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.852900 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjzds\" (UniqueName: \"kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.852945 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.855073 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.855970 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.880607 4912 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fjzds\" (UniqueName: \"kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds\") pod \"community-operators-xwbjj\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:32 crc kubenswrapper[4912]: I1208 21:34:32.967542 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:33 crc kubenswrapper[4912]: I1208 21:34:33.846310 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:33 crc kubenswrapper[4912]: W1208 21:34:33.870957 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda06e1b61_071a_45a0_a973_dd4a8d45843e.slice/crio-3748d7fa15847d37496cf4a403bf11b5ce0052f0c7a50b582d70377e74e7f2a4 WatchSource:0}: Error finding container 3748d7fa15847d37496cf4a403bf11b5ce0052f0c7a50b582d70377e74e7f2a4: Status 404 returned error can't find the container with id 3748d7fa15847d37496cf4a403bf11b5ce0052f0c7a50b582d70377e74e7f2a4 Dec 08 21:34:34 crc kubenswrapper[4912]: I1208 21:34:34.461920 4912 generic.go:334] "Generic (PLEG): container finished" podID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerID="ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8" exitCode=0 Dec 08 21:34:34 crc kubenswrapper[4912]: I1208 21:34:34.461968 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerDied","Data":"ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8"} Dec 08 21:34:34 crc kubenswrapper[4912]: I1208 21:34:34.462261 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerStarted","Data":"3748d7fa15847d37496cf4a403bf11b5ce0052f0c7a50b582d70377e74e7f2a4"} Dec 08 21:34:34 crc kubenswrapper[4912]: I1208 21:34:34.464176 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" event={"ID":"7dcc5a2b-b055-4bdd-903e-ec9772a7877e","Type":"ContainerStarted","Data":"5683c5b63d7719f1686e58db1f29c6eecb9c6a63a272ed737f99f5c9c6145038"} Dec 08 21:34:34 crc kubenswrapper[4912]: I1208 21:34:34.495979 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-29q7z" podStartSLOduration=2.404881966 podStartE2EDuration="4.495958105s" podCreationTimestamp="2025-12-08 21:34:30 +0000 UTC" firstStartedPulling="2025-12-08 21:34:31.450853073 +0000 UTC m=+953.313855156" lastFinishedPulling="2025-12-08 21:34:33.541929212 +0000 UTC m=+955.404931295" observedRunningTime="2025-12-08 21:34:34.495600056 +0000 UTC m=+956.358602139" watchObservedRunningTime="2025-12-08 21:34:34.495958105 +0000 UTC m=+956.358960188" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.447208 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.451500 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.454820 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-kxcjm" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.528576 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.539173 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.540321 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.544539 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.560138 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.566560 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-t75jg"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.568220 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585669 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2zht\" (UniqueName: \"kubernetes.io/projected/666ca373-3dfd-4cc5-bcbe-2d2cc8335b14-kube-api-access-p2zht\") pod \"nmstate-metrics-7f946cbc9-9gtf4\" (UID: \"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585715 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67glh\" (UniqueName: \"kubernetes.io/projected/031db5a0-79e6-4206-8b1a-200a5862e1d1-kube-api-access-67glh\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585774 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-nmstate-lock\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585856 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-dbus-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585911 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/031db5a0-79e6-4206-8b1a-200a5862e1d1-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585936 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl2td\" (UniqueName: \"kubernetes.io/projected/07d5dc75-389c-464e-aba6-8d1a4dd1e736-kube-api-access-gl2td\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.585974 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-ovs-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.659540 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.660415 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: W1208 21:34:35.670872 4912 reflector.go:561] object-"openshift-nmstate"/"default-dockercfg-mqjsc": failed to list *v1.Secret: secrets "default-dockercfg-mqjsc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Dec 08 21:34:35 crc kubenswrapper[4912]: E1208 21:34:35.670932 4912 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"default-dockercfg-mqjsc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-mqjsc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.671180 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.672525 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.682779 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.688911 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-nmstate-lock\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.688987 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-dbus-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.689023 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: 
\"kubernetes.io/secret/031db5a0-79e6-4206-8b1a-200a5862e1d1-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.689062 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl2td\" (UniqueName: \"kubernetes.io/projected/07d5dc75-389c-464e-aba6-8d1a4dd1e736-kube-api-access-gl2td\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.689089 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-ovs-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.689146 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2zht\" (UniqueName: \"kubernetes.io/projected/666ca373-3dfd-4cc5-bcbe-2d2cc8335b14-kube-api-access-p2zht\") pod \"nmstate-metrics-7f946cbc9-9gtf4\" (UID: \"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.689173 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67glh\" (UniqueName: \"kubernetes.io/projected/031db5a0-79e6-4206-8b1a-200a5862e1d1-kube-api-access-67glh\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.690159 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-nmstate-lock\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.690454 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-dbus-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.691822 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/07d5dc75-389c-464e-aba6-8d1a4dd1e736-ovs-socket\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.698899 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/031db5a0-79e6-4206-8b1a-200a5862e1d1-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.713317 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl2td\" (UniqueName: 
\"kubernetes.io/projected/07d5dc75-389c-464e-aba6-8d1a4dd1e736-kube-api-access-gl2td\") pod \"nmstate-handler-t75jg\" (UID: \"07d5dc75-389c-464e-aba6-8d1a4dd1e736\") " pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.713630 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67glh\" (UniqueName: \"kubernetes.io/projected/031db5a0-79e6-4206-8b1a-200a5862e1d1-kube-api-access-67glh\") pod \"nmstate-webhook-5f6d4c5ccb-b9w27\" (UID: \"031db5a0-79e6-4206-8b1a-200a5862e1d1\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.723996 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2zht\" (UniqueName: \"kubernetes.io/projected/666ca373-3dfd-4cc5-bcbe-2d2cc8335b14-kube-api-access-p2zht\") pod \"nmstate-metrics-7f946cbc9-9gtf4\" (UID: \"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.772639 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.793693 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.793800 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4a6801dc-930c-4a3b-9a0d-2455e26011a7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.793835 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw7kf\" (UniqueName: \"kubernetes.io/projected/4a6801dc-930c-4a3b-9a0d-2455e26011a7-kube-api-access-fw7kf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.866198 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.890883 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-76fc8f7b7f-lss4x"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.891928 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.894859 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.894911 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.894949 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n94sc\" (UniqueName: \"kubernetes.io/projected/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-kube-api-access-n94sc\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.894973 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-service-ca\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895019 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895069 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-trusted-ca-bundle\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895116 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4a6801dc-930c-4a3b-9a0d-2455e26011a7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895156 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw7kf\" (UniqueName: \"kubernetes.io/projected/4a6801dc-930c-4a3b-9a0d-2455e26011a7-kube-api-access-fw7kf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895181 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-oauth-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.895205 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-oauth-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: E1208 21:34:35.895393 4912 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 08 21:34:35 crc kubenswrapper[4912]: E1208 21:34:35.895453 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert podName:4a6801dc-930c-4a3b-9a0d-2455e26011a7 nodeName:}" failed. No retries permitted until 2025-12-08 21:34:36.395433654 +0000 UTC m=+958.258435737 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-h2jzn" (UID: "4a6801dc-930c-4a3b-9a0d-2455e26011a7") : secret "plugin-serving-cert" not found Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.896253 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.896902 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4a6801dc-930c-4a3b-9a0d-2455e26011a7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.905705 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-76fc8f7b7f-lss4x"] Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.944171 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw7kf\" (UniqueName: \"kubernetes.io/projected/4a6801dc-930c-4a3b-9a0d-2455e26011a7-kube-api-access-fw7kf\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.996831 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.997094 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: 
I1208 21:34:35.997122 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n94sc\" (UniqueName: \"kubernetes.io/projected/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-kube-api-access-n94sc\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.997145 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-service-ca\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.997195 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-trusted-ca-bundle\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.997242 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-oauth-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.997259 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-oauth-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.998058 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-oauth-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.998497 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.998607 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-service-ca\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:35 crc kubenswrapper[4912]: I1208 21:34:35.999748 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-trusted-ca-bundle\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.001838 4912 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-oauth-config\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.025094 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-console-serving-cert\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.034232 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n94sc\" (UniqueName: \"kubernetes.io/projected/789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3-kube-api-access-n94sc\") pod \"console-76fc8f7b7f-lss4x\" (UID: \"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3\") " pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.222564 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.282732 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4"] Dec 08 21:34:36 crc kubenswrapper[4912]: W1208 21:34:36.296174 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod666ca373_3dfd_4cc5_bcbe_2d2cc8335b14.slice/crio-54aa6b8204b75cd059b3943a5a645edbc89dbd4ed2d20fb3bf8c59c5ca37ab9a WatchSource:0}: Error finding container 54aa6b8204b75cd059b3943a5a645edbc89dbd4ed2d20fb3bf8c59c5ca37ab9a: Status 404 returned error can't find the container with id 54aa6b8204b75cd059b3943a5a645edbc89dbd4ed2d20fb3bf8c59c5ca37ab9a Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.411004 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.417478 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a6801dc-930c-4a3b-9a0d-2455e26011a7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h2jzn\" (UID: \"4a6801dc-930c-4a3b-9a0d-2455e26011a7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.493103 4912 generic.go:334] "Generic (PLEG): container finished" podID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerID="a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2" exitCode=0 Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.493174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerDied","Data":"a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2"} Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.495669 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-handler-t75jg" event={"ID":"07d5dc75-389c-464e-aba6-8d1a4dd1e736","Type":"ContainerStarted","Data":"91e8db11bc0d0f13fc47bd5cf60fb3ae41f01a66967ee29072fe5cbafcf0e658"} Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.496822 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" event={"ID":"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14","Type":"ContainerStarted","Data":"54aa6b8204b75cd059b3943a5a645edbc89dbd4ed2d20fb3bf8c59c5ca37ab9a"} Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.542990 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-76fc8f7b7f-lss4x"] Dec 08 21:34:36 crc kubenswrapper[4912]: I1208 21:34:36.555945 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27"] Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.258324 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-mqjsc" Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.262725 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.519959 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-76fc8f7b7f-lss4x" event={"ID":"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3","Type":"ContainerStarted","Data":"0a7361181ddb8d84987bb3cf77cd075c16feffe16d285ddb2812f8cb4298144f"} Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.520016 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-76fc8f7b7f-lss4x" event={"ID":"789b01cc-6bf4-47ef-a8ae-6f2d09cc4dc3","Type":"ContainerStarted","Data":"a7bb1119109c39786a5e4f2e56511049a17457104517dfd38260f77b364e0869"} Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.531066 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerStarted","Data":"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340"} Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.533208 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" event={"ID":"031db5a0-79e6-4206-8b1a-200a5862e1d1","Type":"ContainerStarted","Data":"2d4d3bd4ad4bd2ec1cc59d8cc64b65643c92450653458e79b803a2325ab06fbe"} Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.541397 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-76fc8f7b7f-lss4x" podStartSLOduration=2.541375714 podStartE2EDuration="2.541375714s" podCreationTimestamp="2025-12-08 21:34:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:34:37.538193817 +0000 UTC m=+959.401195910" watchObservedRunningTime="2025-12-08 21:34:37.541375714 +0000 UTC m=+959.404377807" Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.565836 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xwbjj" podStartSLOduration=3.11830368 podStartE2EDuration="5.565821158s" podCreationTimestamp="2025-12-08 21:34:32 +0000 UTC" firstStartedPulling="2025-12-08 21:34:34.464269927 +0000 UTC m=+956.327272010" lastFinishedPulling="2025-12-08 21:34:36.911787405 +0000 
UTC m=+958.774789488" observedRunningTime="2025-12-08 21:34:37.563539613 +0000 UTC m=+959.426541706" watchObservedRunningTime="2025-12-08 21:34:37.565821158 +0000 UTC m=+959.428823241" Dec 08 21:34:37 crc kubenswrapper[4912]: I1208 21:34:37.744446 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn"] Dec 08 21:34:37 crc kubenswrapper[4912]: W1208 21:34:37.767909 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a6801dc_930c_4a3b_9a0d_2455e26011a7.slice/crio-20ada415590b03bd99f0bb6c18bcfcc5170c7245d0b7ff747848a669c584b21d WatchSource:0}: Error finding container 20ada415590b03bd99f0bb6c18bcfcc5170c7245d0b7ff747848a669c584b21d: Status 404 returned error can't find the container with id 20ada415590b03bd99f0bb6c18bcfcc5170c7245d0b7ff747848a669c584b21d Dec 08 21:34:38 crc kubenswrapper[4912]: I1208 21:34:38.554048 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" event={"ID":"4a6801dc-930c-4a3b-9a0d-2455e26011a7","Type":"ContainerStarted","Data":"20ada415590b03bd99f0bb6c18bcfcc5170c7245d0b7ff747848a669c584b21d"} Dec 08 21:34:39 crc kubenswrapper[4912]: I1208 21:34:39.564599 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" event={"ID":"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14","Type":"ContainerStarted","Data":"239910ac3dcaa73b77255d05045dd9fc44e9878b7b05452f2eb7fec0ec429b34"} Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.572969 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-t75jg" event={"ID":"07d5dc75-389c-464e-aba6-8d1a4dd1e736","Type":"ContainerStarted","Data":"5a899b244477c5f2eb111bb14b82682b9c64f853a8858f4bbe7a6ad77e368551"} Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.573359 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.576174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" event={"ID":"031db5a0-79e6-4206-8b1a-200a5862e1d1","Type":"ContainerStarted","Data":"9ed41744eb3fb132ad6b4ef7b5214f56104760d6216bd4287e8a8b3835b7ec0a"} Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.576357 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.594074 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-t75jg" podStartSLOduration=2.265031158 podStartE2EDuration="5.594055986s" podCreationTimestamp="2025-12-08 21:34:35 +0000 UTC" firstStartedPulling="2025-12-08 21:34:36.017244185 +0000 UTC m=+957.880246268" lastFinishedPulling="2025-12-08 21:34:39.346269023 +0000 UTC m=+961.209271096" observedRunningTime="2025-12-08 21:34:40.593833951 +0000 UTC m=+962.456836044" watchObservedRunningTime="2025-12-08 21:34:40.594055986 +0000 UTC m=+962.457058069" Dec 08 21:34:40 crc kubenswrapper[4912]: I1208 21:34:40.618012 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" podStartSLOduration=2.8057041209999998 podStartE2EDuration="5.617992918s" podCreationTimestamp="2025-12-08 21:34:35 +0000 UTC" firstStartedPulling="2025-12-08 
21:34:36.561154746 +0000 UTC m=+958.424156829" lastFinishedPulling="2025-12-08 21:34:39.373443543 +0000 UTC m=+961.236445626" observedRunningTime="2025-12-08 21:34:40.608737317 +0000 UTC m=+962.471739400" watchObservedRunningTime="2025-12-08 21:34:40.617992918 +0000 UTC m=+962.480995001" Dec 08 21:34:41 crc kubenswrapper[4912]: I1208 21:34:41.583645 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" event={"ID":"4a6801dc-930c-4a3b-9a0d-2455e26011a7","Type":"ContainerStarted","Data":"bc8388858676e6857bdf0366416d38a5b7b202f9877345dbb848d03763c95ef7"} Dec 08 21:34:41 crc kubenswrapper[4912]: I1208 21:34:41.602568 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h2jzn" podStartSLOduration=3.779354373 podStartE2EDuration="6.602551861s" podCreationTimestamp="2025-12-08 21:34:35 +0000 UTC" firstStartedPulling="2025-12-08 21:34:37.775156411 +0000 UTC m=+959.638158484" lastFinishedPulling="2025-12-08 21:34:40.598353889 +0000 UTC m=+962.461355972" observedRunningTime="2025-12-08 21:34:41.600568723 +0000 UTC m=+963.463570806" watchObservedRunningTime="2025-12-08 21:34:41.602551861 +0000 UTC m=+963.465553944" Dec 08 21:34:42 crc kubenswrapper[4912]: I1208 21:34:42.968549 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:42 crc kubenswrapper[4912]: I1208 21:34:42.969437 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:43 crc kubenswrapper[4912]: I1208 21:34:43.009482 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:43 crc kubenswrapper[4912]: I1208 21:34:43.598429 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" event={"ID":"666ca373-3dfd-4cc5-bcbe-2d2cc8335b14","Type":"ContainerStarted","Data":"6e11f6d8ff6e604708b8cbc9fcaab629921b63e422dd0e82f177d9b236c375c5"} Dec 08 21:34:43 crc kubenswrapper[4912]: I1208 21:34:43.670365 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:43 crc kubenswrapper[4912]: I1208 21:34:43.686987 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9gtf4" podStartSLOduration=2.434219622 podStartE2EDuration="8.686968901s" podCreationTimestamp="2025-12-08 21:34:35 +0000 UTC" firstStartedPulling="2025-12-08 21:34:36.301199922 +0000 UTC m=+958.164202005" lastFinishedPulling="2025-12-08 21:34:42.553949201 +0000 UTC m=+964.416951284" observedRunningTime="2025-12-08 21:34:43.623185467 +0000 UTC m=+965.486187580" watchObservedRunningTime="2025-12-08 21:34:43.686968901 +0000 UTC m=+965.549970984" Dec 08 21:34:43 crc kubenswrapper[4912]: I1208 21:34:43.720143 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:45 crc kubenswrapper[4912]: I1208 21:34:45.609010 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xwbjj" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="registry-server" containerID="cri-o://ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340" gracePeriod=2 Dec 08 21:34:45 crc 
kubenswrapper[4912]: I1208 21:34:45.922018 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-t75jg" Dec 08 21:34:45 crc kubenswrapper[4912]: I1208 21:34:45.963401 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.133838 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjzds\" (UniqueName: \"kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds\") pod \"a06e1b61-071a-45a0-a973-dd4a8d45843e\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.133918 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities\") pod \"a06e1b61-071a-45a0-a973-dd4a8d45843e\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.134076 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content\") pod \"a06e1b61-071a-45a0-a973-dd4a8d45843e\" (UID: \"a06e1b61-071a-45a0-a973-dd4a8d45843e\") " Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.134883 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities" (OuterVolumeSpecName: "utilities") pod "a06e1b61-071a-45a0-a973-dd4a8d45843e" (UID: "a06e1b61-071a-45a0-a973-dd4a8d45843e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.139808 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds" (OuterVolumeSpecName: "kube-api-access-fjzds") pod "a06e1b61-071a-45a0-a973-dd4a8d45843e" (UID: "a06e1b61-071a-45a0-a973-dd4a8d45843e"). InnerVolumeSpecName "kube-api-access-fjzds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.223278 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.223635 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.230635 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.235772 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjzds\" (UniqueName: \"kubernetes.io/projected/a06e1b61-071a-45a0-a973-dd4a8d45843e-kube-api-access-fjzds\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.236250 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.360734 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a06e1b61-071a-45a0-a973-dd4a8d45843e" (UID: "a06e1b61-071a-45a0-a973-dd4a8d45843e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.440340 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a06e1b61-071a-45a0-a973-dd4a8d45843e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.616302 4912 generic.go:334] "Generic (PLEG): container finished" podID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerID="ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340" exitCode=0 Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.616381 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerDied","Data":"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340"} Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.616420 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwbjj" event={"ID":"a06e1b61-071a-45a0-a973-dd4a8d45843e","Type":"ContainerDied","Data":"3748d7fa15847d37496cf4a403bf11b5ce0052f0c7a50b582d70377e74e7f2a4"} Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.616443 4912 scope.go:117] "RemoveContainer" containerID="ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.617424 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xwbjj" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.621571 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-76fc8f7b7f-lss4x" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.635328 4912 scope.go:117] "RemoveContainer" containerID="a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.638700 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.644850 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xwbjj"] Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.669179 4912 scope.go:117] "RemoveContainer" containerID="ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.712094 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.712132 4912 scope.go:117] "RemoveContainer" containerID="ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340" Dec 08 21:34:46 crc kubenswrapper[4912]: E1208 21:34:46.713517 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340\": container with ID starting with ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340 not found: ID does not exist" containerID="ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.713564 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340"} err="failed to get container status \"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340\": rpc error: code = NotFound desc = could not find container \"ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340\": container with ID starting with ae834a69f76457794c646a0aa4025f0fb6db7d6a9f9c3915176c201467935340 not found: ID does not exist" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.713593 4912 scope.go:117] "RemoveContainer" containerID="a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2" Dec 08 21:34:46 crc kubenswrapper[4912]: E1208 21:34:46.718762 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2\": container with ID starting with a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2 not found: ID does not exist" containerID="a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.718898 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2"} err="failed to get container status \"a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2\": rpc error: code = NotFound desc = could not find container \"a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2\": container with ID starting with 
a72d800f6055714cb62994a56e2dc1066172c94d1c75634f08ff323c8f2e90d2 not found: ID does not exist" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.718985 4912 scope.go:117] "RemoveContainer" containerID="ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8" Dec 08 21:34:46 crc kubenswrapper[4912]: E1208 21:34:46.722192 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8\": container with ID starting with ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8 not found: ID does not exist" containerID="ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8" Dec 08 21:34:46 crc kubenswrapper[4912]: I1208 21:34:46.722235 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8"} err="failed to get container status \"ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8\": rpc error: code = NotFound desc = could not find container \"ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8\": container with ID starting with ceb6a930cbdae02cf64b83f9a74dd07925d899d5e4863ba47d2a4a6ff955a4e8 not found: ID does not exist" Dec 08 21:34:48 crc kubenswrapper[4912]: I1208 21:34:48.436868 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" path="/var/lib/kubelet/pods/a06e1b61-071a-45a0-a973-dd4a8d45843e/volumes" Dec 08 21:34:55 crc kubenswrapper[4912]: I1208 21:34:55.874006 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-b9w27" Dec 08 21:35:02 crc kubenswrapper[4912]: I1208 21:35:02.964834 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:35:02 crc kubenswrapper[4912]: I1208 21:35:02.965517 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.332901 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692"] Dec 08 21:35:11 crc kubenswrapper[4912]: E1208 21:35:11.334446 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="registry-server" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.334501 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="registry-server" Dec 08 21:35:11 crc kubenswrapper[4912]: E1208 21:35:11.334526 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="extract-utilities" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.334535 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="extract-utilities" Dec 08 21:35:11 crc 
kubenswrapper[4912]: E1208 21:35:11.335220 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="extract-content" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.335249 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="extract-content" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.335696 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a06e1b61-071a-45a0-a973-dd4a8d45843e" containerName="registry-server" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.343108 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.350358 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.353255 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692"] Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.417954 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.418146 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hknq\" (UniqueName: \"kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.418211 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.519717 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.519802 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hknq\" (UniqueName: \"kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.519842 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.520273 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.520533 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.542293 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hknq\" (UniqueName: \"kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.713292 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.777127 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-46knq" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" containerID="cri-o://bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db" gracePeriod=15 Dec 08 21:35:11 crc kubenswrapper[4912]: I1208 21:35:11.961521 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692"] Dec 08 21:35:11 crc kubenswrapper[4912]: W1208 21:35:11.971765 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc14041a7_869f_4553_8da9_0ac2c0ca9d7a.slice/crio-0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007 WatchSource:0}: Error finding container 0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007: Status 404 returned error can't find the container with id 0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007 Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.128260 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-46knq_1627b83a-6756-4797-b857-7495c262d53c/console/0.log" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.128336 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230252 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5zm5\" (UniqueName: \"kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230400 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230423 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230464 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230484 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230547 4912 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.230572 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert\") pod \"1627b83a-6756-4797-b857-7495c262d53c\" (UID: \"1627b83a-6756-4797-b857-7495c262d53c\") " Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.231133 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.231666 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.231809 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca" (OuterVolumeSpecName: "service-ca") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.232258 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config" (OuterVolumeSpecName: "console-config") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.236508 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.236684 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5" (OuterVolumeSpecName: "kube-api-access-k5zm5") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "kube-api-access-k5zm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.236856 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "1627b83a-6756-4797-b857-7495c262d53c" (UID: "1627b83a-6756-4797-b857-7495c262d53c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.331640 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5zm5\" (UniqueName: \"kubernetes.io/projected/1627b83a-6756-4797-b857-7495c262d53c-kube-api-access-k5zm5\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.331899 4912 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.331975 4912 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.332063 4912 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.332139 4912 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.332259 4912 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1627b83a-6756-4797-b857-7495c262d53c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.332345 4912 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1627b83a-6756-4797-b857-7495c262d53c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.792921 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-46knq_1627b83a-6756-4797-b857-7495c262d53c/console/0.log" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.792975 4912 generic.go:334] "Generic (PLEG): container finished" podID="1627b83a-6756-4797-b857-7495c262d53c" containerID="bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db" exitCode=2 Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.793050 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-46knq" event={"ID":"1627b83a-6756-4797-b857-7495c262d53c","Type":"ContainerDied","Data":"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db"} Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.793089 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-46knq" event={"ID":"1627b83a-6756-4797-b857-7495c262d53c","Type":"ContainerDied","Data":"9d468ca7f933ce9d7be35b11ac2d8a9d2aa89d76d2cf70901ced70a91e67261b"} Dec 08 21:35:12 crc 
kubenswrapper[4912]: I1208 21:35:12.793109 4912 scope.go:117] "RemoveContainer" containerID="bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.793135 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-46knq" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.798557 4912 generic.go:334] "Generic (PLEG): container finished" podID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerID="81183415d42d5eb9afe141d6a2c3dd2cf8a49e8a8938c82686787a920460d410" exitCode=0 Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.798599 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerDied","Data":"81183415d42d5eb9afe141d6a2c3dd2cf8a49e8a8938c82686787a920460d410"} Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.798622 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerStarted","Data":"0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007"} Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.819264 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.823020 4912 scope.go:117] "RemoveContainer" containerID="bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db" Dec 08 21:35:12 crc kubenswrapper[4912]: E1208 21:35:12.823696 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db\": container with ID starting with bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db not found: ID does not exist" containerID="bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.823757 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db"} err="failed to get container status \"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db\": rpc error: code = NotFound desc = could not find container \"bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db\": container with ID starting with bb0ffe3c9c798559f1f56c5b0c3ba45910781b91bf56a9f421316646c4c563db not found: ID does not exist" Dec 08 21:35:12 crc kubenswrapper[4912]: I1208 21:35:12.825499 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-46knq"] Dec 08 21:35:14 crc kubenswrapper[4912]: I1208 21:35:14.436420 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1627b83a-6756-4797-b857-7495c262d53c" path="/var/lib/kubelet/pods/1627b83a-6756-4797-b857-7495c262d53c/volumes" Dec 08 21:35:14 crc kubenswrapper[4912]: I1208 21:35:14.834216 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerStarted","Data":"8e88a094e8c61f662ea52fd1ed2075771681d9e2a3a1c2515d792f7bbeedefdf"} Dec 08 21:35:15 crc kubenswrapper[4912]: 
I1208 21:35:15.842338 4912 generic.go:334] "Generic (PLEG): container finished" podID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerID="8e88a094e8c61f662ea52fd1ed2075771681d9e2a3a1c2515d792f7bbeedefdf" exitCode=0 Dec 08 21:35:15 crc kubenswrapper[4912]: I1208 21:35:15.842476 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerDied","Data":"8e88a094e8c61f662ea52fd1ed2075771681d9e2a3a1c2515d792f7bbeedefdf"} Dec 08 21:35:16 crc kubenswrapper[4912]: I1208 21:35:16.851408 4912 generic.go:334] "Generic (PLEG): container finished" podID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerID="c27c8e5603ba82a07bc967936ea650cbafdc9ced4f5a01c34483b765c3c5424a" exitCode=0 Dec 08 21:35:16 crc kubenswrapper[4912]: I1208 21:35:16.851525 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerDied","Data":"c27c8e5603ba82a07bc967936ea650cbafdc9ced4f5a01c34483b765c3c5424a"} Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.144027 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.316786 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hknq\" (UniqueName: \"kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq\") pod \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.316940 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle\") pod \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.316996 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util\") pod \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\" (UID: \"c14041a7-869f-4553-8da9-0ac2c0ca9d7a\") " Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.318069 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle" (OuterVolumeSpecName: "bundle") pod "c14041a7-869f-4553-8da9-0ac2c0ca9d7a" (UID: "c14041a7-869f-4553-8da9-0ac2c0ca9d7a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.322607 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq" (OuterVolumeSpecName: "kube-api-access-4hknq") pod "c14041a7-869f-4553-8da9-0ac2c0ca9d7a" (UID: "c14041a7-869f-4553-8da9-0ac2c0ca9d7a"). InnerVolumeSpecName "kube-api-access-4hknq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.328876 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util" (OuterVolumeSpecName: "util") pod "c14041a7-869f-4553-8da9-0ac2c0ca9d7a" (UID: "c14041a7-869f-4553-8da9-0ac2c0ca9d7a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.418789 4912 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.418834 4912 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.418845 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hknq\" (UniqueName: \"kubernetes.io/projected/c14041a7-869f-4553-8da9-0ac2c0ca9d7a-kube-api-access-4hknq\") on node \"crc\" DevicePath \"\"" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.866283 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" event={"ID":"c14041a7-869f-4553-8da9-0ac2c0ca9d7a","Type":"ContainerDied","Data":"0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007"} Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.866334 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0995e93edfc754d0718e973f95c27e81b7ea7055d8bd0fd607d36d47b9386007" Dec 08 21:35:18 crc kubenswrapper[4912]: I1208 21:35:18.866399 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.052799 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr"] Dec 08 21:35:30 crc kubenswrapper[4912]: E1208 21:35:30.053699 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="pull" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.053714 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="pull" Dec 08 21:35:30 crc kubenswrapper[4912]: E1208 21:35:30.053729 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.053922 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" Dec 08 21:35:30 crc kubenswrapper[4912]: E1208 21:35:30.053937 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="extract" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.053946 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="extract" Dec 08 21:35:30 crc kubenswrapper[4912]: E1208 21:35:30.053958 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="util" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.053967 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="util" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.054114 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c14041a7-869f-4553-8da9-0ac2c0ca9d7a" containerName="extract" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.054132 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="1627b83a-6756-4797-b857-7495c262d53c" containerName="console" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.054605 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.058070 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.058072 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.058406 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.058717 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-8cjxb" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.058970 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.070501 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr"] Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.089940 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-apiservice-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.090027 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-webhook-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.090074 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t668z\" (UniqueName: \"kubernetes.io/projected/1ae06dae-d5d5-4365-abca-c7a177b0fb56-kube-api-access-t668z\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.191190 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-webhook-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.191254 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t668z\" (UniqueName: \"kubernetes.io/projected/1ae06dae-d5d5-4365-abca-c7a177b0fb56-kube-api-access-t668z\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.191310 4912 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-apiservice-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.197788 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-webhook-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.197797 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ae06dae-d5d5-4365-abca-c7a177b0fb56-apiservice-cert\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.213027 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t668z\" (UniqueName: \"kubernetes.io/projected/1ae06dae-d5d5-4365-abca-c7a177b0fb56-kube-api-access-t668z\") pod \"metallb-operator-controller-manager-c7c694577-x2mvr\" (UID: \"1ae06dae-d5d5-4365-abca-c7a177b0fb56\") " pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.284605 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-84d7d76888-grwms"] Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.285672 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.289587 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.289622 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.289602 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-6jxzr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.317213 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-84d7d76888-grwms"] Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.373341 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.394395 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-apiservice-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.394485 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x4fx\" (UniqueName: \"kubernetes.io/projected/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-kube-api-access-2x4fx\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.394630 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-webhook-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.496908 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x4fx\" (UniqueName: \"kubernetes.io/projected/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-kube-api-access-2x4fx\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.497271 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-webhook-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.497319 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-apiservice-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.502329 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-webhook-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.502860 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-apiservice-cert\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " 
pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.514780 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x4fx\" (UniqueName: \"kubernetes.io/projected/033dc0ca-3e1a-4c56-a687-bedb4849f5cd-kube-api-access-2x4fx\") pod \"metallb-operator-webhook-server-84d7d76888-grwms\" (UID: \"033dc0ca-3e1a-4c56-a687-bedb4849f5cd\") " pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.601099 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.860551 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr"] Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.901890 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-84d7d76888-grwms"] Dec 08 21:35:30 crc kubenswrapper[4912]: W1208 21:35:30.914586 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod033dc0ca_3e1a_4c56_a687_bedb4849f5cd.slice/crio-6abc9e1c02483a2d000d14df307b30dfd1e6384b812b5de92a28e19302d1cf18 WatchSource:0}: Error finding container 6abc9e1c02483a2d000d14df307b30dfd1e6384b812b5de92a28e19302d1cf18: Status 404 returned error can't find the container with id 6abc9e1c02483a2d000d14df307b30dfd1e6384b812b5de92a28e19302d1cf18 Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.938744 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" event={"ID":"033dc0ca-3e1a-4c56-a687-bedb4849f5cd","Type":"ContainerStarted","Data":"6abc9e1c02483a2d000d14df307b30dfd1e6384b812b5de92a28e19302d1cf18"} Dec 08 21:35:30 crc kubenswrapper[4912]: I1208 21:35:30.940034 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" event={"ID":"1ae06dae-d5d5-4365-abca-c7a177b0fb56","Type":"ContainerStarted","Data":"2e3c66520a16e7e9cfa5bf523498c190992596dac22059fbf041385d4134ac4f"} Dec 08 21:35:32 crc kubenswrapper[4912]: I1208 21:35:32.965254 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:35:32 crc kubenswrapper[4912]: I1208 21:35:32.965752 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.005019 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" event={"ID":"033dc0ca-3e1a-4c56-a687-bedb4849f5cd","Type":"ContainerStarted","Data":"1ac270932ec7720986928579b9948d59bbe919687c37483a0732370d12f21bd6"} Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.005408 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.007834 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" event={"ID":"1ae06dae-d5d5-4365-abca-c7a177b0fb56","Type":"ContainerStarted","Data":"a2b88d966622d2f4aa4e32136a5c2a02c8fc454655e346ed426f8c2399578e84"} Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.008020 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.028698 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" podStartSLOduration=1.894551276 podStartE2EDuration="7.028678589s" podCreationTimestamp="2025-12-08 21:35:30 +0000 UTC" firstStartedPulling="2025-12-08 21:35:30.924692327 +0000 UTC m=+1012.787694410" lastFinishedPulling="2025-12-08 21:35:36.05881964 +0000 UTC m=+1017.921821723" observedRunningTime="2025-12-08 21:35:37.024014053 +0000 UTC m=+1018.887016156" watchObservedRunningTime="2025-12-08 21:35:37.028678589 +0000 UTC m=+1018.891680672" Dec 08 21:35:37 crc kubenswrapper[4912]: I1208 21:35:37.055168 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" podStartSLOduration=1.887900637 podStartE2EDuration="7.055146141s" podCreationTimestamp="2025-12-08 21:35:30 +0000 UTC" firstStartedPulling="2025-12-08 21:35:30.869179593 +0000 UTC m=+1012.732181676" lastFinishedPulling="2025-12-08 21:35:36.036425097 +0000 UTC m=+1017.899427180" observedRunningTime="2025-12-08 21:35:37.051477466 +0000 UTC m=+1018.914479559" watchObservedRunningTime="2025-12-08 21:35:37.055146141 +0000 UTC m=+1018.918148224" Dec 08 21:35:50 crc kubenswrapper[4912]: I1208 21:35:50.607440 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-84d7d76888-grwms" Dec 08 21:36:02 crc kubenswrapper[4912]: I1208 21:36:02.964854 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:36:02 crc kubenswrapper[4912]: I1208 21:36:02.965242 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:36:02 crc kubenswrapper[4912]: I1208 21:36:02.965290 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:36:03 crc kubenswrapper[4912]: I1208 21:36:03.172638 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:36:03 crc 
kubenswrapper[4912]: I1208 21:36:03.172710 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f" gracePeriod=600 Dec 08 21:36:04 crc kubenswrapper[4912]: I1208 21:36:04.184766 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f" exitCode=0 Dec 08 21:36:04 crc kubenswrapper[4912]: I1208 21:36:04.184833 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f"} Dec 08 21:36:04 crc kubenswrapper[4912]: I1208 21:36:04.184888 4912 scope.go:117] "RemoveContainer" containerID="8106664a8b2aa8fe44f0e624ddefd9ea7c76fb2bf84c756329c17f67bc09391d" Dec 08 21:36:05 crc kubenswrapper[4912]: I1208 21:36:05.193111 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8"} Dec 08 21:36:10 crc kubenswrapper[4912]: I1208 21:36:10.376494 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-c7c694577-x2mvr" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.214804 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.215661 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.217663 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.219049 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-69vjr" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.220803 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-tt6k7"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.223729 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.226174 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.229119 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.234328 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295325 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-865qd\" (UniqueName: \"kubernetes.io/projected/0b46dd74-03d1-4aa9-8bae-24cc229206aa-kube-api-access-865qd\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295387 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295435 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-conf\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295462 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295504 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-sockets\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295561 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99kcg\" (UniqueName: \"kubernetes.io/projected/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-kube-api-access-99kcg\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295585 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-startup\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295605 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.295736 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-reloader\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.313958 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-xrxk5"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.314948 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.316496 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.316564 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6jb65" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.320086 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.321521 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.328346 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-rbzfs"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.329338 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.330967 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.349712 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-rbzfs"] Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396485 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-cert\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396532 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-conf\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396562 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396584 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-824br\" (UniqueName: \"kubernetes.io/projected/60cae323-cb71-49a6-90e4-7ac76e98ec75-kube-api-access-824br\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396611 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-sockets\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396637 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2zdk\" (UniqueName: \"kubernetes.io/projected/5348f01f-774a-4d17-9a26-ddc251ec89f8-kube-api-access-t2zdk\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396677 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396696 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-metrics-certs\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 
21:36:11.396715 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.396693 4912 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.396808 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert podName:0b46dd74-03d1-4aa9-8bae-24cc229206aa nodeName:}" failed. No retries permitted until 2025-12-08 21:36:11.896783195 +0000 UTC m=+1053.759785368 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert") pod "frr-k8s-webhook-server-7fcb986d4-h6ntw" (UID: "0b46dd74-03d1-4aa9-8bae-24cc229206aa") : secret "frr-k8s-webhook-server-cert" not found Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396733 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99kcg\" (UniqueName: \"kubernetes.io/projected/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-kube-api-access-99kcg\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396891 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-startup\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396925 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.396980 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-reloader\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397003 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/60cae323-cb71-49a6-90e4-7ac76e98ec75-metallb-excludel2\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397009 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-conf\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397164 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-865qd\" (UniqueName: 
\"kubernetes.io/projected/0b46dd74-03d1-4aa9-8bae-24cc229206aa-kube-api-access-865qd\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397189 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397230 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-sockets\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.397295 4912 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397305 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-reloader\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.397346 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs podName:c03a9518-39ca-436c-84ca-d02a8e6ef6d7 nodeName:}" failed. No retries permitted until 2025-12-08 21:36:11.897330392 +0000 UTC m=+1053.760332475 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs") pod "frr-k8s-tt6k7" (UID: "c03a9518-39ca-436c-84ca-d02a8e6ef6d7") : secret "frr-k8s-certs-secret" not found Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397412 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.397819 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-frr-startup\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.417854 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-865qd\" (UniqueName: \"kubernetes.io/projected/0b46dd74-03d1-4aa9-8bae-24cc229206aa-kube-api-access-865qd\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.433701 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99kcg\" (UniqueName: \"kubernetes.io/projected/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-kube-api-access-99kcg\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.497901 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/60cae323-cb71-49a6-90e4-7ac76e98ec75-metallb-excludel2\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498007 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-cert\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498051 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-824br\" (UniqueName: \"kubernetes.io/projected/60cae323-cb71-49a6-90e4-7ac76e98ec75-kube-api-access-824br\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498092 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2zdk\" (UniqueName: \"kubernetes.io/projected/5348f01f-774a-4d17-9a26-ddc251ec89f8-kube-api-access-t2zdk\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498123 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs\") pod \"speaker-xrxk5\" (UID: 
\"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498157 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-metrics-certs\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498171 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.498285 4912 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.498328 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist podName:60cae323-cb71-49a6-90e4-7ac76e98ec75 nodeName:}" failed. No retries permitted until 2025-12-08 21:36:11.998314554 +0000 UTC m=+1053.861316637 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist") pod "speaker-xrxk5" (UID: "60cae323-cb71-49a6-90e4-7ac76e98ec75") : secret "metallb-memberlist" not found Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.498533 4912 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 08 21:36:11 crc kubenswrapper[4912]: E1208 21:36:11.498561 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs podName:60cae323-cb71-49a6-90e4-7ac76e98ec75 nodeName:}" failed. No retries permitted until 2025-12-08 21:36:11.998554732 +0000 UTC m=+1053.861556805 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs") pod "speaker-xrxk5" (UID: "60cae323-cb71-49a6-90e4-7ac76e98ec75") : secret "speaker-certs-secret" not found Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.498630 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/60cae323-cb71-49a6-90e4-7ac76e98ec75-metallb-excludel2\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.503346 4912 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.503628 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-metrics-certs\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.514532 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5348f01f-774a-4d17-9a26-ddc251ec89f8-cert\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.528287 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2zdk\" (UniqueName: \"kubernetes.io/projected/5348f01f-774a-4d17-9a26-ddc251ec89f8-kube-api-access-t2zdk\") pod \"controller-f8648f98b-rbzfs\" (UID: \"5348f01f-774a-4d17-9a26-ddc251ec89f8\") " pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.528512 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-824br\" (UniqueName: \"kubernetes.io/projected/60cae323-cb71-49a6-90e4-7ac76e98ec75-kube-api-access-824br\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.642505 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.913831 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.914201 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.919711 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03a9518-39ca-436c-84ca-d02a8e6ef6d7-metrics-certs\") pod \"frr-k8s-tt6k7\" (UID: \"c03a9518-39ca-436c-84ca-d02a8e6ef6d7\") " pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:11 crc kubenswrapper[4912]: I1208 21:36:11.920202 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b46dd74-03d1-4aa9-8bae-24cc229206aa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-h6ntw\" (UID: \"0b46dd74-03d1-4aa9-8bae-24cc229206aa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.015133 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.015180 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:12 crc kubenswrapper[4912]: E1208 21:36:12.015395 4912 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 08 21:36:12 crc kubenswrapper[4912]: E1208 21:36:12.015473 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist podName:60cae323-cb71-49a6-90e4-7ac76e98ec75 nodeName:}" failed. No retries permitted until 2025-12-08 21:36:13.015452621 +0000 UTC m=+1054.878454694 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist") pod "speaker-xrxk5" (UID: "60cae323-cb71-49a6-90e4-7ac76e98ec75") : secret "metallb-memberlist" not found Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.018082 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-metrics-certs\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.117891 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-rbzfs"] Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.139863 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.162725 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.236781 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-rbzfs" event={"ID":"5348f01f-774a-4d17-9a26-ddc251ec89f8","Type":"ContainerStarted","Data":"b31286be4290653087c5c3a5e57a7c5403c47f603bb8e0c9705d4d02c0e3370a"} Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.303996 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:36:12 crc kubenswrapper[4912]: I1208 21:36:12.552153 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw"] Dec 08 21:36:12 crc kubenswrapper[4912]: W1208 21:36:12.555149 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b46dd74_03d1_4aa9_8bae_24cc229206aa.slice/crio-138401e0adf947276bb99383936ded3d85526ef3037c509c8c9524c568a9d970 WatchSource:0}: Error finding container 138401e0adf947276bb99383936ded3d85526ef3037c509c8c9524c568a9d970: Status 404 returned error can't find the container with id 138401e0adf947276bb99383936ded3d85526ef3037c509c8c9524c568a9d970 Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.036831 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.042809 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/60cae323-cb71-49a6-90e4-7ac76e98ec75-memberlist\") pod \"speaker-xrxk5\" (UID: \"60cae323-cb71-49a6-90e4-7ac76e98ec75\") " pod="metallb-system/speaker-xrxk5" Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.129477 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-xrxk5" Dec 08 21:36:13 crc kubenswrapper[4912]: W1208 21:36:13.152260 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60cae323_cb71_49a6_90e4_7ac76e98ec75.slice/crio-352689eb6461f41470cae16412a41e12c5a5a8758885c74af0767ecedd3494d5 WatchSource:0}: Error finding container 352689eb6461f41470cae16412a41e12c5a5a8758885c74af0767ecedd3494d5: Status 404 returned error can't find the container with id 352689eb6461f41470cae16412a41e12c5a5a8758885c74af0767ecedd3494d5 Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.254885 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-rbzfs" event={"ID":"5348f01f-774a-4d17-9a26-ddc251ec89f8","Type":"ContainerStarted","Data":"860e6d2c46b500392f9e4cf56e2cf12fda705391295af32748d5346749080fd6"} Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.254945 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-rbzfs" event={"ID":"5348f01f-774a-4d17-9a26-ddc251ec89f8","Type":"ContainerStarted","Data":"ad00e815c5f82ba74915234b7c532ad532f4d06adea826560f086e903df55218"} Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.255013 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.256412 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" event={"ID":"0b46dd74-03d1-4aa9-8bae-24cc229206aa","Type":"ContainerStarted","Data":"138401e0adf947276bb99383936ded3d85526ef3037c509c8c9524c568a9d970"} Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.257708 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrxk5" event={"ID":"60cae323-cb71-49a6-90e4-7ac76e98ec75","Type":"ContainerStarted","Data":"352689eb6461f41470cae16412a41e12c5a5a8758885c74af0767ecedd3494d5"} Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.258668 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"62a8fe0fe65757cf9459dad7f99a061dc2bcd4f27c42c78a126693939b5298ce"} Dec 08 21:36:13 crc kubenswrapper[4912]: I1208 21:36:13.276447 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-rbzfs" podStartSLOduration=2.276410115 podStartE2EDuration="2.276410115s" podCreationTimestamp="2025-12-08 21:36:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:36:13.271495771 +0000 UTC m=+1055.134497854" watchObservedRunningTime="2025-12-08 21:36:13.276410115 +0000 UTC m=+1055.139412198" Dec 08 21:36:14 crc kubenswrapper[4912]: I1208 21:36:14.270556 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrxk5" event={"ID":"60cae323-cb71-49a6-90e4-7ac76e98ec75","Type":"ContainerStarted","Data":"aee414d0e8220a3ce6c80da385db7b9ec7c75ec26689cd111ca895a81dc1a3ab"} Dec 08 21:36:14 crc kubenswrapper[4912]: I1208 21:36:14.271148 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrxk5" event={"ID":"60cae323-cb71-49a6-90e4-7ac76e98ec75","Type":"ContainerStarted","Data":"b9bbfdded7aea2afc269df71108d317d1e8c68ced2e0d2ccd60c1c52e8800304"} Dec 08 21:36:14 
crc kubenswrapper[4912]: I1208 21:36:14.294397 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-xrxk5" podStartSLOduration=3.294381124 podStartE2EDuration="3.294381124s" podCreationTimestamp="2025-12-08 21:36:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:36:14.291620048 +0000 UTC m=+1056.154622131" watchObservedRunningTime="2025-12-08 21:36:14.294381124 +0000 UTC m=+1056.157383207" Dec 08 21:36:15 crc kubenswrapper[4912]: I1208 21:36:15.277415 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-xrxk5" Dec 08 21:36:22 crc kubenswrapper[4912]: I1208 21:36:22.335776 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" event={"ID":"0b46dd74-03d1-4aa9-8bae-24cc229206aa","Type":"ContainerStarted","Data":"85d7b7a80538ce717616a3ecac3913b91cdf334331cca4325929f0caeceea316"} Dec 08 21:36:22 crc kubenswrapper[4912]: I1208 21:36:22.337170 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:22 crc kubenswrapper[4912]: I1208 21:36:22.339230 4912 generic.go:334] "Generic (PLEG): container finished" podID="c03a9518-39ca-436c-84ca-d02a8e6ef6d7" containerID="57c11de3798e8927dd9749d6a9bb54af13c52b2faaeb098a84bb5d102c74e8c8" exitCode=0 Dec 08 21:36:22 crc kubenswrapper[4912]: I1208 21:36:22.339298 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerDied","Data":"57c11de3798e8927dd9749d6a9bb54af13c52b2faaeb098a84bb5d102c74e8c8"} Dec 08 21:36:22 crc kubenswrapper[4912]: I1208 21:36:22.354201 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" podStartSLOduration=2.382321672 podStartE2EDuration="11.353947881s" podCreationTimestamp="2025-12-08 21:36:11 +0000 UTC" firstStartedPulling="2025-12-08 21:36:12.557746877 +0000 UTC m=+1054.420748970" lastFinishedPulling="2025-12-08 21:36:21.529373096 +0000 UTC m=+1063.392375179" observedRunningTime="2025-12-08 21:36:22.348981795 +0000 UTC m=+1064.211983888" watchObservedRunningTime="2025-12-08 21:36:22.353947881 +0000 UTC m=+1064.216949964" Dec 08 21:36:23 crc kubenswrapper[4912]: I1208 21:36:23.134383 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-xrxk5" Dec 08 21:36:23 crc kubenswrapper[4912]: I1208 21:36:23.348617 4912 generic.go:334] "Generic (PLEG): container finished" podID="c03a9518-39ca-436c-84ca-d02a8e6ef6d7" containerID="dcad6e1420113f3dab1d92147948372645a061d50ffa435fc795c17173f66ce3" exitCode=0 Dec 08 21:36:23 crc kubenswrapper[4912]: I1208 21:36:23.348723 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerDied","Data":"dcad6e1420113f3dab1d92147948372645a061d50ffa435fc795c17173f66ce3"} Dec 08 21:36:24 crc kubenswrapper[4912]: I1208 21:36:24.358254 4912 generic.go:334] "Generic (PLEG): container finished" podID="c03a9518-39ca-436c-84ca-d02a8e6ef6d7" containerID="11df65da84a71b0f354a62991130c3008b6e48d652b81c652f7b3f07a728a0c0" exitCode=0 Dec 08 21:36:24 crc kubenswrapper[4912]: I1208 21:36:24.358335 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerDied","Data":"11df65da84a71b0f354a62991130c3008b6e48d652b81c652f7b3f07a728a0c0"} Dec 08 21:36:25 crc kubenswrapper[4912]: I1208 21:36:25.367820 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"d75d98e5cfa7b17b60e7700efc6e804aa0b3a58c3d807efc724cfeda3b2c5078"} Dec 08 21:36:25 crc kubenswrapper[4912]: I1208 21:36:25.367880 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"e114979bb537b41218e12e99c12dc75ae574afc5f6deff520180ffa113aafdda"} Dec 08 21:36:25 crc kubenswrapper[4912]: I1208 21:36:25.367897 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"6904a3579c1d363c98050a78c4a1c1c6c44a0c883dc061c0eb6baa59859e6cd0"} Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.381068 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"cc8626327072ba5d887d677f9f67be6d333d0b2d654838eb2c57bb41dbc34813"} Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.381450 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"dc19cde0194f55649d3f608fdfaf376a8ed41b15d21586ba189c8f6b9c6d4613"} Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.381468 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-tt6k7" event={"ID":"c03a9518-39ca-436c-84ca-d02a8e6ef6d7","Type":"ContainerStarted","Data":"ea93d17c841cd5140e93d35009061e53c16397daaaeda4c118cf86a8471e512e"} Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.382657 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.399555 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.400697 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.409557 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-bl7fm" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.416343 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.417232 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.443107 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.443955 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-tt6k7" podStartSLOduration=6.240406226 podStartE2EDuration="15.443940311s" podCreationTimestamp="2025-12-08 21:36:11 +0000 UTC" firstStartedPulling="2025-12-08 21:36:12.303482899 +0000 UTC m=+1054.166484982" lastFinishedPulling="2025-12-08 21:36:21.507016984 +0000 UTC m=+1063.370019067" observedRunningTime="2025-12-08 21:36:26.424143429 +0000 UTC m=+1068.287145512" watchObservedRunningTime="2025-12-08 21:36:26.443940311 +0000 UTC m=+1068.306942394" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.520076 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fncgf\" (UniqueName: \"kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf\") pod \"openstack-operator-index-rl4bc\" (UID: \"bdb11bcd-dffc-420e-ab0c-b496835c3a92\") " pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.621572 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fncgf\" (UniqueName: \"kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf\") pod \"openstack-operator-index-rl4bc\" (UID: \"bdb11bcd-dffc-420e-ab0c-b496835c3a92\") " pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.642004 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fncgf\" (UniqueName: \"kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf\") pod \"openstack-operator-index-rl4bc\" (UID: \"bdb11bcd-dffc-420e-ab0c-b496835c3a92\") " pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:26 crc kubenswrapper[4912]: I1208 21:36:26.740934 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:27 crc kubenswrapper[4912]: I1208 21:36:27.164101 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:27 crc kubenswrapper[4912]: I1208 21:36:27.216573 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:27 crc kubenswrapper[4912]: I1208 21:36:27.327753 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:27 crc kubenswrapper[4912]: W1208 21:36:27.332455 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbdb11bcd_dffc_420e_ab0c_b496835c3a92.slice/crio-d1db512cbe4459ae9cf0b5afe4f8ac60f5b4bd0db9cfac9258fa77a957e59fcc WatchSource:0}: Error finding container d1db512cbe4459ae9cf0b5afe4f8ac60f5b4bd0db9cfac9258fa77a957e59fcc: Status 404 returned error can't find the container with id d1db512cbe4459ae9cf0b5afe4f8ac60f5b4bd0db9cfac9258fa77a957e59fcc Dec 08 21:36:27 crc kubenswrapper[4912]: I1208 21:36:27.394977 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rl4bc" event={"ID":"bdb11bcd-dffc-420e-ab0c-b496835c3a92","Type":"ContainerStarted","Data":"d1db512cbe4459ae9cf0b5afe4f8ac60f5b4bd0db9cfac9258fa77a957e59fcc"} Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.166661 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.777383 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-dg4gd"] Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.778376 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.787838 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dg4gd"] Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.876544 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cm64\" (UniqueName: \"kubernetes.io/projected/960d8c65-06be-4c35-8529-f1a8b7440b1d-kube-api-access-5cm64\") pod \"openstack-operator-index-dg4gd\" (UID: \"960d8c65-06be-4c35-8529-f1a8b7440b1d\") " pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:29 crc kubenswrapper[4912]: I1208 21:36:29.978080 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cm64\" (UniqueName: \"kubernetes.io/projected/960d8c65-06be-4c35-8529-f1a8b7440b1d-kube-api-access-5cm64\") pod \"openstack-operator-index-dg4gd\" (UID: \"960d8c65-06be-4c35-8529-f1a8b7440b1d\") " pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.015221 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cm64\" (UniqueName: \"kubernetes.io/projected/960d8c65-06be-4c35-8529-f1a8b7440b1d-kube-api-access-5cm64\") pod \"openstack-operator-index-dg4gd\" (UID: \"960d8c65-06be-4c35-8529-f1a8b7440b1d\") " pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.109328 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.456120 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rl4bc" event={"ID":"bdb11bcd-dffc-420e-ab0c-b496835c3a92","Type":"ContainerStarted","Data":"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a"} Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.456418 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-rl4bc" podUID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" containerName="registry-server" containerID="cri-o://4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a" gracePeriod=2 Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.477382 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rl4bc" podStartSLOduration=1.714318412 podStartE2EDuration="4.477344254s" podCreationTimestamp="2025-12-08 21:36:26 +0000 UTC" firstStartedPulling="2025-12-08 21:36:27.340162457 +0000 UTC m=+1069.203164540" lastFinishedPulling="2025-12-08 21:36:30.103188299 +0000 UTC m=+1071.966190382" observedRunningTime="2025-12-08 21:36:30.473231894 +0000 UTC m=+1072.336233977" watchObservedRunningTime="2025-12-08 21:36:30.477344254 +0000 UTC m=+1072.340346337" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.525960 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dg4gd"] Dec 08 21:36:30 crc kubenswrapper[4912]: W1208 21:36:30.549445 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod960d8c65_06be_4c35_8529_f1a8b7440b1d.slice/crio-5d177adeab1c825d348726538911c2c06712144ace1e652461bae79cc23b8492 
WatchSource:0}: Error finding container 5d177adeab1c825d348726538911c2c06712144ace1e652461bae79cc23b8492: Status 404 returned error can't find the container with id 5d177adeab1c825d348726538911c2c06712144ace1e652461bae79cc23b8492 Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.814974 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.891019 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fncgf\" (UniqueName: \"kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf\") pod \"bdb11bcd-dffc-420e-ab0c-b496835c3a92\" (UID: \"bdb11bcd-dffc-420e-ab0c-b496835c3a92\") " Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.899469 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf" (OuterVolumeSpecName: "kube-api-access-fncgf") pod "bdb11bcd-dffc-420e-ab0c-b496835c3a92" (UID: "bdb11bcd-dffc-420e-ab0c-b496835c3a92"). InnerVolumeSpecName "kube-api-access-fncgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:36:30 crc kubenswrapper[4912]: I1208 21:36:30.993854 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fncgf\" (UniqueName: \"kubernetes.io/projected/bdb11bcd-dffc-420e-ab0c-b496835c3a92-kube-api-access-fncgf\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.467523 4912 generic.go:334] "Generic (PLEG): container finished" podID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" containerID="4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a" exitCode=0 Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.467597 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rl4bc" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.467669 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rl4bc" event={"ID":"bdb11bcd-dffc-420e-ab0c-b496835c3a92","Type":"ContainerDied","Data":"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a"} Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.467722 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rl4bc" event={"ID":"bdb11bcd-dffc-420e-ab0c-b496835c3a92","Type":"ContainerDied","Data":"d1db512cbe4459ae9cf0b5afe4f8ac60f5b4bd0db9cfac9258fa77a957e59fcc"} Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.467748 4912 scope.go:117] "RemoveContainer" containerID="4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.469924 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dg4gd" event={"ID":"960d8c65-06be-4c35-8529-f1a8b7440b1d","Type":"ContainerStarted","Data":"e929dd46a08356459893280c0ad38eb5f03bad61a868f3c5fce085070a4b6063"} Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.469964 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dg4gd" event={"ID":"960d8c65-06be-4c35-8529-f1a8b7440b1d","Type":"ContainerStarted","Data":"5d177adeab1c825d348726538911c2c06712144ace1e652461bae79cc23b8492"} Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.488861 4912 scope.go:117] "RemoveContainer" containerID="4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a" Dec 08 21:36:31 crc kubenswrapper[4912]: E1208 21:36:31.489391 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a\": container with ID starting with 4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a not found: ID does not exist" containerID="4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.489430 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a"} err="failed to get container status \"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a\": rpc error: code = NotFound desc = could not find container \"4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a\": container with ID starting with 4288b6c7404d90d1d4ba805672e93d4b5e792b2833a71930a15e98c9b590c23a not found: ID does not exist" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.494738 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-dg4gd" podStartSLOduration=2.447307164 podStartE2EDuration="2.494720504s" podCreationTimestamp="2025-12-08 21:36:29 +0000 UTC" firstStartedPulling="2025-12-08 21:36:30.553470345 +0000 UTC m=+1072.416472418" lastFinishedPulling="2025-12-08 21:36:30.600883675 +0000 UTC m=+1072.463885758" observedRunningTime="2025-12-08 21:36:31.490296775 +0000 UTC m=+1073.353298868" watchObservedRunningTime="2025-12-08 21:36:31.494720504 +0000 UTC m=+1073.357722587" Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.507766 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.513632 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-rl4bc"] Dec 08 21:36:31 crc kubenswrapper[4912]: I1208 21:36:31.646499 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-rbzfs" Dec 08 21:36:32 crc kubenswrapper[4912]: I1208 21:36:32.185127 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-h6ntw" Dec 08 21:36:32 crc kubenswrapper[4912]: I1208 21:36:32.436376 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" path="/var/lib/kubelet/pods/bdb11bcd-dffc-420e-ab0c-b496835c3a92/volumes" Dec 08 21:36:40 crc kubenswrapper[4912]: I1208 21:36:40.109758 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:40 crc kubenswrapper[4912]: I1208 21:36:40.110180 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:40 crc kubenswrapper[4912]: I1208 21:36:40.141240 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:40 crc kubenswrapper[4912]: I1208 21:36:40.550511 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-dg4gd" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.608531 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t"] Dec 08 21:36:41 crc kubenswrapper[4912]: E1208 21:36:41.608784 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" containerName="registry-server" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.608795 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" containerName="registry-server" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.608922 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdb11bcd-dffc-420e-ab0c-b496835c3a92" containerName="registry-server" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.609793 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.611700 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zxxs6" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.619398 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t"] Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.743814 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb2z6\" (UniqueName: \"kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.743877 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.743901 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.845000 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb2z6\" (UniqueName: \"kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.845096 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.845136 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.845842 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.846097 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.864193 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb2z6\" (UniqueName: \"kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6\") pod \"6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:41 crc kubenswrapper[4912]: I1208 21:36:41.930593 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:42 crc kubenswrapper[4912]: I1208 21:36:42.165811 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-tt6k7" Dec 08 21:36:42 crc kubenswrapper[4912]: I1208 21:36:42.367456 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t"] Dec 08 21:36:42 crc kubenswrapper[4912]: W1208 21:36:42.388639 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef89f58e_50c9_488e_bed0_bcfaac1e2851.slice/crio-0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc WatchSource:0}: Error finding container 0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc: Status 404 returned error can't find the container with id 0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc Dec 08 21:36:42 crc kubenswrapper[4912]: I1208 21:36:42.540173 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerStarted","Data":"26305cdd8bd0d4b486a992512d141ed6d92579d643e2e7db387a08e5958e3d91"} Dec 08 21:36:42 crc kubenswrapper[4912]: I1208 21:36:42.540509 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerStarted","Data":"0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc"} Dec 08 21:36:43 crc kubenswrapper[4912]: I1208 21:36:43.552694 4912 generic.go:334] "Generic (PLEG): container finished" podID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerID="26305cdd8bd0d4b486a992512d141ed6d92579d643e2e7db387a08e5958e3d91" exitCode=0 Dec 08 21:36:43 crc kubenswrapper[4912]: I1208 21:36:43.552830 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" 
event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerDied","Data":"26305cdd8bd0d4b486a992512d141ed6d92579d643e2e7db387a08e5958e3d91"} Dec 08 21:36:44 crc kubenswrapper[4912]: I1208 21:36:44.564559 4912 generic.go:334] "Generic (PLEG): container finished" podID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerID="3a505f0191f556fcf0e04fd682b21ceea7258550d834e9a777ae318821828af1" exitCode=0 Dec 08 21:36:44 crc kubenswrapper[4912]: I1208 21:36:44.564686 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerDied","Data":"3a505f0191f556fcf0e04fd682b21ceea7258550d834e9a777ae318821828af1"} Dec 08 21:36:45 crc kubenswrapper[4912]: I1208 21:36:45.575966 4912 generic.go:334] "Generic (PLEG): container finished" podID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerID="731d20dc97e6c3001f1a203fd88d167387e51f12a292d97ae6cffe2848796158" exitCode=0 Dec 08 21:36:45 crc kubenswrapper[4912]: I1208 21:36:45.576071 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerDied","Data":"731d20dc97e6c3001f1a203fd88d167387e51f12a292d97ae6cffe2848796158"} Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.865761 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.924140 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util\") pod \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.924320 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle\") pod \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.924370 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb2z6\" (UniqueName: \"kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6\") pod \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\" (UID: \"ef89f58e-50c9-488e-bed0-bcfaac1e2851\") " Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.925468 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle" (OuterVolumeSpecName: "bundle") pod "ef89f58e-50c9-488e-bed0-bcfaac1e2851" (UID: "ef89f58e-50c9-488e-bed0-bcfaac1e2851"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.932309 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6" (OuterVolumeSpecName: "kube-api-access-hb2z6") pod "ef89f58e-50c9-488e-bed0-bcfaac1e2851" (UID: "ef89f58e-50c9-488e-bed0-bcfaac1e2851"). InnerVolumeSpecName "kube-api-access-hb2z6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:36:46 crc kubenswrapper[4912]: I1208 21:36:46.947554 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util" (OuterVolumeSpecName: "util") pod "ef89f58e-50c9-488e-bed0-bcfaac1e2851" (UID: "ef89f58e-50c9-488e-bed0-bcfaac1e2851"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.025891 4912 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.026209 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb2z6\" (UniqueName: \"kubernetes.io/projected/ef89f58e-50c9-488e-bed0-bcfaac1e2851-kube-api-access-hb2z6\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.026273 4912 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef89f58e-50c9-488e-bed0-bcfaac1e2851-util\") on node \"crc\" DevicePath \"\"" Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.591677 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" event={"ID":"ef89f58e-50c9-488e-bed0-bcfaac1e2851","Type":"ContainerDied","Data":"0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc"} Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.591710 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c68b3526009c0b4db775406c80bd837759366ea6fd7bf07cfe7079ac86c22dc" Dec 08 21:36:47 crc kubenswrapper[4912]: I1208 21:36:47.592145 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.961236 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv"] Dec 08 21:36:53 crc kubenswrapper[4912]: E1208 21:36:53.962943 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="util" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.963010 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="util" Dec 08 21:36:53 crc kubenswrapper[4912]: E1208 21:36:53.963096 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="extract" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.963149 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="extract" Dec 08 21:36:53 crc kubenswrapper[4912]: E1208 21:36:53.963244 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="pull" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.963303 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="pull" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.963465 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef89f58e-50c9-488e-bed0-bcfaac1e2851" containerName="extract" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.963973 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.967543 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-vdb6l" Dec 08 21:36:53 crc kubenswrapper[4912]: I1208 21:36:53.984245 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv"] Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.034972 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6cxk\" (UniqueName: \"kubernetes.io/projected/121fb516-7dae-4a50-ac6c-6d0bf7781dce-kube-api-access-h6cxk\") pod \"openstack-operator-controller-operator-7d4449658c-rc2sv\" (UID: \"121fb516-7dae-4a50-ac6c-6d0bf7781dce\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.136489 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6cxk\" (UniqueName: \"kubernetes.io/projected/121fb516-7dae-4a50-ac6c-6d0bf7781dce-kube-api-access-h6cxk\") pod \"openstack-operator-controller-operator-7d4449658c-rc2sv\" (UID: \"121fb516-7dae-4a50-ac6c-6d0bf7781dce\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.156835 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6cxk\" (UniqueName: \"kubernetes.io/projected/121fb516-7dae-4a50-ac6c-6d0bf7781dce-kube-api-access-h6cxk\") pod \"openstack-operator-controller-operator-7d4449658c-rc2sv\" 
(UID: \"121fb516-7dae-4a50-ac6c-6d0bf7781dce\") " pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.295732 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.563939 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv"] Dec 08 21:36:54 crc kubenswrapper[4912]: I1208 21:36:54.674218 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" event={"ID":"121fb516-7dae-4a50-ac6c-6d0bf7781dce","Type":"ContainerStarted","Data":"6a3b49d60bacac62a89f4e9209d8b7f251bcf01065b648841339aaa7f2e05da3"} Dec 08 21:36:59 crc kubenswrapper[4912]: I1208 21:36:59.710336 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" event={"ID":"121fb516-7dae-4a50-ac6c-6d0bf7781dce","Type":"ContainerStarted","Data":"e16c791439577a48c7b9a4d5e59147511537816eb2e2069bb6e964065bb44fcb"} Dec 08 21:36:59 crc kubenswrapper[4912]: I1208 21:36:59.710742 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:36:59 crc kubenswrapper[4912]: I1208 21:36:59.745606 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" podStartSLOduration=2.706450686 podStartE2EDuration="6.745584452s" podCreationTimestamp="2025-12-08 21:36:53 +0000 UTC" firstStartedPulling="2025-12-08 21:36:54.56192104 +0000 UTC m=+1096.424923123" lastFinishedPulling="2025-12-08 21:36:58.601054806 +0000 UTC m=+1100.464056889" observedRunningTime="2025-12-08 21:36:59.73898263 +0000 UTC m=+1101.601984713" watchObservedRunningTime="2025-12-08 21:36:59.745584452 +0000 UTC m=+1101.608586535" Dec 08 21:37:04 crc kubenswrapper[4912]: I1208 21:37:04.299534 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7d4449658c-rc2sv" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.033846 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.035849 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.040795 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.042233 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.042825 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9p45v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.044004 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-975gv" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.053062 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.054442 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.058486 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-qp7qg" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.059612 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.069382 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.088442 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.089841 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.096189 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.097530 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.102798 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-lc7nl" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.103136 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-5tz6g" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.120091 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.132238 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.150979 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4rgd\" (UniqueName: \"kubernetes.io/projected/ce1d46fe-d3fc-4386-a545-3e4513ca68c3-kube-api-access-g4rgd\") pod \"glance-operator-controller-manager-5697bb5779-54mxl\" (UID: \"ce1d46fe-d3fc-4386-a545-3e4513ca68c3\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.151051 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6lp5\" (UniqueName: \"kubernetes.io/projected/b4ebbd1e-bbd8-4290-8745-ed80c67abf66-kube-api-access-v6lp5\") pod \"barbican-operator-controller-manager-7d9dfd778-tsz7z\" (UID: \"b4ebbd1e-bbd8-4290-8745-ed80c67abf66\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.151117 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wklrj\" (UniqueName: \"kubernetes.io/projected/b95889d5-9404-4bc5-867a-5bf1492855db-kube-api-access-wklrj\") pod \"heat-operator-controller-manager-5f64f6f8bb-dqmmb\" (UID: \"b95889d5-9404-4bc5-867a-5bf1492855db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.151147 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spr54\" (UniqueName: \"kubernetes.io/projected/0bf34a60-0aa4-4408-84f8-7848cf76086f-kube-api-access-spr54\") pod \"designate-operator-controller-manager-697fb699cf-plw7z\" (UID: \"0bf34a60-0aa4-4408-84f8-7848cf76086f\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.151170 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxd56\" (UniqueName: \"kubernetes.io/projected/b47b551b-07c5-4fc7-b6a9-76208870148f-kube-api-access-sxd56\") pod \"cinder-operator-controller-manager-6c677c69b-mpssj\" (UID: \"b47b551b-07c5-4fc7-b6a9-76208870148f\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.151242 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 
21:37:24.156897 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.158092 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.164102 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.171306 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-npklk" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.173460 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.175115 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.181445 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-92jhm" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.181663 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.211120 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.221221 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.222343 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.226629 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.227743 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-c527p" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.239442 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.240730 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.250467 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-cz8zc" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.250510 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.251922 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.255664 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-n7z9d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.259918 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wklrj\" (UniqueName: \"kubernetes.io/projected/b95889d5-9404-4bc5-867a-5bf1492855db-kube-api-access-wklrj\") pod \"heat-operator-controller-manager-5f64f6f8bb-dqmmb\" (UID: \"b95889d5-9404-4bc5-867a-5bf1492855db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.259990 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8rdf\" (UniqueName: \"kubernetes.io/projected/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-kube-api-access-c8rdf\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.260065 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spr54\" (UniqueName: \"kubernetes.io/projected/0bf34a60-0aa4-4408-84f8-7848cf76086f-kube-api-access-spr54\") pod \"designate-operator-controller-manager-697fb699cf-plw7z\" (UID: \"0bf34a60-0aa4-4408-84f8-7848cf76086f\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.260119 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxd56\" (UniqueName: \"kubernetes.io/projected/b47b551b-07c5-4fc7-b6a9-76208870148f-kube-api-access-sxd56\") pod \"cinder-operator-controller-manager-6c677c69b-mpssj\" (UID: \"b47b551b-07c5-4fc7-b6a9-76208870148f\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.260189 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.260250 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkq6x\" (UniqueName: \"kubernetes.io/projected/0e45e244-50c1-4b0b-8e49-615f31b2cf2c-kube-api-access-mkq6x\") pod \"horizon-operator-controller-manager-68c6d99b8f-r9g4v\" (UID: \"0e45e244-50c1-4b0b-8e49-615f31b2cf2c\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.260275 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4rgd\" (UniqueName: \"kubernetes.io/projected/ce1d46fe-d3fc-4386-a545-3e4513ca68c3-kube-api-access-g4rgd\") pod \"glance-operator-controller-manager-5697bb5779-54mxl\" (UID: \"ce1d46fe-d3fc-4386-a545-3e4513ca68c3\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:37:24 
crc kubenswrapper[4912]: I1208 21:37:24.260309 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6lp5\" (UniqueName: \"kubernetes.io/projected/b4ebbd1e-bbd8-4290-8745-ed80c67abf66-kube-api-access-v6lp5\") pod \"barbican-operator-controller-manager-7d9dfd778-tsz7z\" (UID: \"b4ebbd1e-bbd8-4290-8745-ed80c67abf66\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.262374 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.270097 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.320971 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spr54\" (UniqueName: \"kubernetes.io/projected/0bf34a60-0aa4-4408-84f8-7848cf76086f-kube-api-access-spr54\") pod \"designate-operator-controller-manager-697fb699cf-plw7z\" (UID: \"0bf34a60-0aa4-4408-84f8-7848cf76086f\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.326399 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6lp5\" (UniqueName: \"kubernetes.io/projected/b4ebbd1e-bbd8-4290-8745-ed80c67abf66-kube-api-access-v6lp5\") pod \"barbican-operator-controller-manager-7d9dfd778-tsz7z\" (UID: \"b4ebbd1e-bbd8-4290-8745-ed80c67abf66\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.326710 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.326733 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4rgd\" (UniqueName: \"kubernetes.io/projected/ce1d46fe-d3fc-4386-a545-3e4513ca68c3-kube-api-access-g4rgd\") pod \"glance-operator-controller-manager-5697bb5779-54mxl\" (UID: \"ce1d46fe-d3fc-4386-a545-3e4513ca68c3\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.336715 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wklrj\" (UniqueName: \"kubernetes.io/projected/b95889d5-9404-4bc5-867a-5bf1492855db-kube-api-access-wklrj\") pod \"heat-operator-controller-manager-5f64f6f8bb-dqmmb\" (UID: \"b95889d5-9404-4bc5-867a-5bf1492855db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.378090 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.383627 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.384980 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxd56\" (UniqueName: \"kubernetes.io/projected/b47b551b-07c5-4fc7-b6a9-76208870148f-kube-api-access-sxd56\") pod \"cinder-operator-controller-manager-6c677c69b-mpssj\" (UID: \"b47b551b-07c5-4fc7-b6a9-76208870148f\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385437 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8rdf\" (UniqueName: \"kubernetes.io/projected/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-kube-api-access-c8rdf\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385474 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8zpg\" (UniqueName: \"kubernetes.io/projected/a8da65eb-3b52-473d-93c2-da58da0d0cfc-kube-api-access-c8zpg\") pod \"keystone-operator-controller-manager-7765d96ddf-hfdwp\" (UID: \"a8da65eb-3b52-473d-93c2-da58da0d0cfc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385517 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385546 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztq75\" (UniqueName: \"kubernetes.io/projected/ef95618f-ec2f-438b-ba4f-15cb5e057b6e-kube-api-access-ztq75\") pod \"ironic-operator-controller-manager-967d97867-fmxw5\" (UID: \"ef95618f-ec2f-438b-ba4f-15cb5e057b6e\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385566 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkq6x\" (UniqueName: \"kubernetes.io/projected/0e45e244-50c1-4b0b-8e49-615f31b2cf2c-kube-api-access-mkq6x\") pod \"horizon-operator-controller-manager-68c6d99b8f-r9g4v\" (UID: \"0e45e244-50c1-4b0b-8e49-615f31b2cf2c\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.385587 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z85zv\" (UniqueName: \"kubernetes.io/projected/dd47e823-cffc-4455-ae03-a29000d733ab-kube-api-access-z85zv\") pod \"manila-operator-controller-manager-5b5fd79c9c-xjlfc\" (UID: \"dd47e823-cffc-4455-ae03-a29000d733ab\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.385860 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 
21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.385895 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:24.885881944 +0000 UTC m=+1126.748884027 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.411590 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.411895 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.422229 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.425738 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.427263 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.429505 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.440181 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.443510 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-npkpk" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.448915 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-fsq85" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.449224 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-p424t" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.481715 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8rdf\" (UniqueName: \"kubernetes.io/projected/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-kube-api-access-c8rdf\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.482797 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkq6x\" (UniqueName: \"kubernetes.io/projected/0e45e244-50c1-4b0b-8e49-615f31b2cf2c-kube-api-access-mkq6x\") pod \"horizon-operator-controller-manager-68c6d99b8f-r9g4v\" (UID: \"0e45e244-50c1-4b0b-8e49-615f31b2cf2c\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.498585 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.498623 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.509788 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hzlm\" (UniqueName: \"kubernetes.io/projected/d737be23-9586-4023-b01e-a9f7161b3b4c-kube-api-access-5hzlm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-4gfv4\" (UID: \"d737be23-9586-4023-b01e-a9f7161b3b4c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.510065 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8zpg\" (UniqueName: \"kubernetes.io/projected/a8da65eb-3b52-473d-93c2-da58da0d0cfc-kube-api-access-c8zpg\") pod \"keystone-operator-controller-manager-7765d96ddf-hfdwp\" (UID: \"a8da65eb-3b52-473d-93c2-da58da0d0cfc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.510117 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqh7t\" (UniqueName: \"kubernetes.io/projected/7967d486-bea2-4064-8fbd-658052c9ac9f-kube-api-access-bqh7t\") pod \"mariadb-operator-controller-manager-79c8c4686c-wpc67\" (UID: \"7967d486-bea2-4064-8fbd-658052c9ac9f\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.510167 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ztq75\" (UniqueName: \"kubernetes.io/projected/ef95618f-ec2f-438b-ba4f-15cb5e057b6e-kube-api-access-ztq75\") pod \"ironic-operator-controller-manager-967d97867-fmxw5\" (UID: \"ef95618f-ec2f-438b-ba4f-15cb5e057b6e\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.510193 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z85zv\" (UniqueName: \"kubernetes.io/projected/dd47e823-cffc-4455-ae03-a29000d733ab-kube-api-access-z85zv\") pod \"manila-operator-controller-manager-5b5fd79c9c-xjlfc\" (UID: \"dd47e823-cffc-4455-ae03-a29000d733ab\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.515246 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.518840 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.540200 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.541248 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.548660 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-4l6xt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.568453 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8zpg\" (UniqueName: \"kubernetes.io/projected/a8da65eb-3b52-473d-93c2-da58da0d0cfc-kube-api-access-c8zpg\") pod \"keystone-operator-controller-manager-7765d96ddf-hfdwp\" (UID: \"a8da65eb-3b52-473d-93c2-da58da0d0cfc\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.568530 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.573614 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z85zv\" (UniqueName: \"kubernetes.io/projected/dd47e823-cffc-4455-ae03-a29000d733ab-kube-api-access-z85zv\") pod \"manila-operator-controller-manager-5b5fd79c9c-xjlfc\" (UID: \"dd47e823-cffc-4455-ae03-a29000d733ab\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.581444 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztq75\" (UniqueName: \"kubernetes.io/projected/ef95618f-ec2f-438b-ba4f-15cb5e057b6e-kube-api-access-ztq75\") pod \"ironic-operator-controller-manager-967d97867-fmxw5\" (UID: \"ef95618f-ec2f-438b-ba4f-15cb5e057b6e\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.598780 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.611190 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.617489 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hzlm\" (UniqueName: \"kubernetes.io/projected/d737be23-9586-4023-b01e-a9f7161b3b4c-kube-api-access-5hzlm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-4gfv4\" (UID: \"d737be23-9586-4023-b01e-a9f7161b3b4c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.617534 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfqzw\" (UniqueName: \"kubernetes.io/projected/92f66d4c-5b7c-4bc0-820d-3319fa35a16b-kube-api-access-hfqzw\") pod \"nova-operator-controller-manager-697bc559fc-qsllt\" (UID: \"92f66d4c-5b7c-4bc0-820d-3319fa35a16b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.617728 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqh7t\" (UniqueName: \"kubernetes.io/projected/7967d486-bea2-4064-8fbd-658052c9ac9f-kube-api-access-bqh7t\") pod \"mariadb-operator-controller-manager-79c8c4686c-wpc67\" (UID: \"7967d486-bea2-4064-8fbd-658052c9ac9f\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.623511 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.628756 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.631142 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.638125 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-km6cr" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.651631 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hzlm\" (UniqueName: \"kubernetes.io/projected/d737be23-9586-4023-b01e-a9f7161b3b4c-kube-api-access-5hzlm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-4gfv4\" (UID: \"d737be23-9586-4023-b01e-a9f7161b3b4c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.668412 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.684907 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqh7t\" (UniqueName: \"kubernetes.io/projected/7967d486-bea2-4064-8fbd-658052c9ac9f-kube-api-access-bqh7t\") pod \"mariadb-operator-controller-manager-79c8c4686c-wpc67\" (UID: \"7967d486-bea2-4064-8fbd-658052c9ac9f\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.708140 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.709492 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.715586 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-6tvhm" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.722829 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dplcb\" (UniqueName: \"kubernetes.io/projected/dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33-kube-api-access-dplcb\") pod \"octavia-operator-controller-manager-998648c74-9xkjf\" (UID: \"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.722965 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfqzw\" (UniqueName: \"kubernetes.io/projected/92f66d4c-5b7c-4bc0-820d-3319fa35a16b-kube-api-access-hfqzw\") pod \"nova-operator-controller-manager-697bc559fc-qsllt\" (UID: \"92f66d4c-5b7c-4bc0-820d-3319fa35a16b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.737478 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.738516 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.747683 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfqzw\" (UniqueName: \"kubernetes.io/projected/92f66d4c-5b7c-4bc0-820d-3319fa35a16b-kube-api-access-hfqzw\") pod \"nova-operator-controller-manager-697bc559fc-qsllt\" (UID: \"92f66d4c-5b7c-4bc0-820d-3319fa35a16b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.748223 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-zbp6g" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.760248 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.761664 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.770519 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-962zt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.776509 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.784713 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.803238 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.804489 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.804575 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.807652 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-sthmt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.814601 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.830197 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.830264 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dplcb\" (UniqueName: \"kubernetes.io/projected/dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33-kube-api-access-dplcb\") pod \"octavia-operator-controller-manager-998648c74-9xkjf\" (UID: \"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.830351 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mmkk\" (UniqueName: \"kubernetes.io/projected/9c67d467-660a-4bbc-a32c-b197db949502-kube-api-access-4mmkk\") pod \"ovn-operator-controller-manager-b6456fdb6-zxmcp\" (UID: \"9c67d467-660a-4bbc-a32c-b197db949502\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.830397 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bjmv\" (UniqueName: \"kubernetes.io/projected/fb8b700a-18a4-49ce-86cb-a38e2ff4cb58-kube-api-access-9bjmv\") pod \"swift-operator-controller-manager-9d58d64bc-5s8dd\" (UID: \"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58\") " 
pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.830426 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxg2j\" (UniqueName: \"kubernetes.io/projected/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-kube-api-access-kxg2j\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.840613 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.845084 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.863601 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.873180 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dplcb\" (UniqueName: \"kubernetes.io/projected/dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33-kube-api-access-dplcb\") pod \"octavia-operator-controller-manager-998648c74-9xkjf\" (UID: \"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.879765 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.889719 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sr59v"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.890989 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.895422 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hqwnw" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.900699 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sr59v"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931430 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvdzw\" (UniqueName: \"kubernetes.io/projected/4cfa9728-de47-4dfa-96d9-53b1c591e650-kube-api-access-cvdzw\") pod \"placement-operator-controller-manager-78f8948974-8cqw8\" (UID: \"4cfa9728-de47-4dfa-96d9-53b1c591e650\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931480 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931549 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931569 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mmkk\" (UniqueName: \"kubernetes.io/projected/9c67d467-660a-4bbc-a32c-b197db949502-kube-api-access-4mmkk\") pod \"ovn-operator-controller-manager-b6456fdb6-zxmcp\" (UID: \"9c67d467-660a-4bbc-a32c-b197db949502\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931600 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf4bn\" (UniqueName: \"kubernetes.io/projected/17fc5fff-819f-4786-8e6d-9a0d6265e8ce-kube-api-access-cf4bn\") pod \"telemetry-operator-controller-manager-65f6d9c768-xfcfd\" (UID: \"17fc5fff-819f-4786-8e6d-9a0d6265e8ce\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931630 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bjmv\" (UniqueName: \"kubernetes.io/projected/fb8b700a-18a4-49ce-86cb-a38e2ff4cb58-kube-api-access-9bjmv\") pod \"swift-operator-controller-manager-9d58d64bc-5s8dd\" (UID: \"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.931651 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxg2j\" (UniqueName: \"kubernetes.io/projected/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-kube-api-access-kxg2j\") pod 
\"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.932099 4912 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.932138 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert podName:cbe56e26-bee2-4664-abc8-2d7ff76aa32e nodeName:}" failed. No retries permitted until 2025-12-08 21:37:25.432124048 +0000 UTC m=+1127.295126121 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f7p4sp" (UID: "cbe56e26-bee2-4664-abc8-2d7ff76aa32e") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.932274 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:37:24 crc kubenswrapper[4912]: E1208 21:37:24.932296 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:25.932288722 +0000 UTC m=+1127.795290805 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.952124 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mmkk\" (UniqueName: \"kubernetes.io/projected/9c67d467-660a-4bbc-a32c-b197db949502-kube-api-access-4mmkk\") pod \"ovn-operator-controller-manager-b6456fdb6-zxmcp\" (UID: \"9c67d467-660a-4bbc-a32c-b197db949502\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.952182 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.958704 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.984643 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg"] Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.985676 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxg2j\" (UniqueName: \"kubernetes.io/projected/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-kube-api-access-kxg2j\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.986365 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-fsv9f" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.990583 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" Dec 08 21:37:24 crc kubenswrapper[4912]: I1208 21:37:24.995611 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.020571 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bjmv\" (UniqueName: \"kubernetes.io/projected/fb8b700a-18a4-49ce-86cb-a38e2ff4cb58-kube-api-access-9bjmv\") pod \"swift-operator-controller-manager-9d58d64bc-5s8dd\" (UID: \"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.034396 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbdkn\" (UniqueName: \"kubernetes.io/projected/b17c499c-4624-462b-a672-cd23b4b63301-kube-api-access-sbdkn\") pod \"test-operator-controller-manager-5854674fcc-sr59v\" (UID: \"b17c499c-4624-462b-a672-cd23b4b63301\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.034497 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvdzw\" (UniqueName: \"kubernetes.io/projected/4cfa9728-de47-4dfa-96d9-53b1c591e650-kube-api-access-cvdzw\") pod \"placement-operator-controller-manager-78f8948974-8cqw8\" (UID: \"4cfa9728-de47-4dfa-96d9-53b1c591e650\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.034651 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf4bn\" (UniqueName: \"kubernetes.io/projected/17fc5fff-819f-4786-8e6d-9a0d6265e8ce-kube-api-access-cf4bn\") pod \"telemetry-operator-controller-manager-65f6d9c768-xfcfd\" (UID: \"17fc5fff-819f-4786-8e6d-9a0d6265e8ce\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.056191 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf4bn\" (UniqueName: \"kubernetes.io/projected/17fc5fff-819f-4786-8e6d-9a0d6265e8ce-kube-api-access-cf4bn\") pod 
\"telemetry-operator-controller-manager-65f6d9c768-xfcfd\" (UID: \"17fc5fff-819f-4786-8e6d-9a0d6265e8ce\") " pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.076770 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvdzw\" (UniqueName: \"kubernetes.io/projected/4cfa9728-de47-4dfa-96d9-53b1c591e650-kube-api-access-cvdzw\") pod \"placement-operator-controller-manager-78f8948974-8cqw8\" (UID: \"4cfa9728-de47-4dfa-96d9-53b1c591e650\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.104718 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.106854 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.111171 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-v4cw2" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.111413 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.111539 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.123391 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.135497 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbdkn\" (UniqueName: \"kubernetes.io/projected/b17c499c-4624-462b-a672-cd23b4b63301-kube-api-access-sbdkn\") pod \"test-operator-controller-manager-5854674fcc-sr59v\" (UID: \"b17c499c-4624-462b-a672-cd23b4b63301\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.135575 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68ch4\" (UniqueName: \"kubernetes.io/projected/e36bfc63-943e-49f5-ab0b-021474292dc7-kube-api-access-68ch4\") pod \"watcher-operator-controller-manager-667bd8d554-5twmg\" (UID: \"e36bfc63-943e-49f5-ab0b-021474292dc7\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.153915 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbdkn\" (UniqueName: \"kubernetes.io/projected/b17c499c-4624-462b-a672-cd23b4b63301-kube-api-access-sbdkn\") pod \"test-operator-controller-manager-5854674fcc-sr59v\" (UID: \"b17c499c-4624-462b-a672-cd23b4b63301\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.156551 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.157784 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.163630 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-hwtv4" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.168939 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.190764 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.213925 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.229101 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.238809 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.238907 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.239071 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68ch4\" (UniqueName: \"kubernetes.io/projected/e36bfc63-943e-49f5-ab0b-021474292dc7-kube-api-access-68ch4\") pod \"watcher-operator-controller-manager-667bd8d554-5twmg\" (UID: \"e36bfc63-943e-49f5-ab0b-021474292dc7\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.239109 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcr8f\" (UniqueName: \"kubernetes.io/projected/99cab708-e8b8-4a28-8a36-f91964fc84e1-kube-api-access-mcr8f\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.248472 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.260372 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68ch4\" (UniqueName: \"kubernetes.io/projected/e36bfc63-943e-49f5-ab0b-021474292dc7-kube-api-access-68ch4\") pod \"watcher-operator-controller-manager-667bd8d554-5twmg\" (UID: \"e36bfc63-943e-49f5-ab0b-021474292dc7\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.321126 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.331414 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.340125 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.340174 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.340288 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcr8f\" (UniqueName: \"kubernetes.io/projected/99cab708-e8b8-4a28-8a36-f91964fc84e1-kube-api-access-mcr8f\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.340334 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8wz2\" (UniqueName: \"kubernetes.io/projected/243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b-kube-api-access-l8wz2\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgzdx\" (UID: \"243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.340425 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl"] Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.340501 4912 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.340556 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. 
No retries permitted until 2025-12-08 21:37:25.840537389 +0000 UTC m=+1127.703539472 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.354152 4912 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.354257 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:25.854232685 +0000 UTC m=+1127.717234768 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "metrics-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.366436 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcr8f\" (UniqueName: \"kubernetes.io/projected/99cab708-e8b8-4a28-8a36-f91964fc84e1-kube-api-access-mcr8f\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.443704 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8wz2\" (UniqueName: \"kubernetes.io/projected/243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b-kube-api-access-l8wz2\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgzdx\" (UID: \"243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.443868 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.444017 4912 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.444080 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert podName:cbe56e26-bee2-4664-abc8-2d7ff76aa32e nodeName:}" failed. No retries permitted until 2025-12-08 21:37:26.444065789 +0000 UTC m=+1128.307067872 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f7p4sp" (UID: "cbe56e26-bee2-4664-abc8-2d7ff76aa32e") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.468164 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8wz2\" (UniqueName: \"kubernetes.io/projected/243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b-kube-api-access-l8wz2\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgzdx\" (UID: \"243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.495985 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.847009 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.851481 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.851711 4912 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.851774 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:26.851759783 +0000 UTC m=+1128.714761866 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.860857 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z"] Dec 08 21:37:25 crc kubenswrapper[4912]: W1208 21:37:25.864827 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd737be23_9586_4023_b01e_a9f7161b3b4c.slice/crio-ce65781a3ea00408ac3822bb223222a8f87ef3a052973c9773997e446e0e6630 WatchSource:0}: Error finding container ce65781a3ea00408ac3822bb223222a8f87ef3a052973c9773997e446e0e6630: Status 404 returned error can't find the container with id ce65781a3ea00408ac3822bb223222a8f87ef3a052973c9773997e446e0e6630 Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.884574 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp"] Dec 08 21:37:25 crc kubenswrapper[4912]: W1208 21:37:25.894048 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bf34a60_0aa4_4408_84f8_7848cf76086f.slice/crio-b08f295ea9e157a961b69a3a527657b540eb065de89fc77bd3ed9d79c6dbcb89 WatchSource:0}: Error finding container b08f295ea9e157a961b69a3a527657b540eb065de89fc77bd3ed9d79c6dbcb89: Status 404 returned error can't find the container with id b08f295ea9e157a961b69a3a527657b540eb065de89fc77bd3ed9d79c6dbcb89 Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.897378 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z"] Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.916193 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" event={"ID":"ce1d46fe-d3fc-4386-a545-3e4513ca68c3","Type":"ContainerStarted","Data":"0e59c632767e19e086ce2667a5cc943c514da28bd2dd6637c17b7881ec14f430"} Dec 08 21:37:25 crc kubenswrapper[4912]: W1208 21:37:25.917304 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8da65eb_3b52_473d_93c2_da58da0d0cfc.slice/crio-1d9ad00187ec9203a06c5c09e1bc2e574a78a51a06c621f8d9dbb0ae4fc0484c WatchSource:0}: Error finding container 1d9ad00187ec9203a06c5c09e1bc2e574a78a51a06c621f8d9dbb0ae4fc0484c: Status 404 returned error can't find the container with id 1d9ad00187ec9203a06c5c09e1bc2e574a78a51a06c621f8d9dbb0ae4fc0484c Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.923235 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" event={"ID":"d737be23-9586-4023-b01e-a9f7161b3b4c","Type":"ContainerStarted","Data":"ce65781a3ea00408ac3822bb223222a8f87ef3a052973c9773997e446e0e6630"} Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.953325 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " 
pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:37:25 crc kubenswrapper[4912]: I1208 21:37:25.953388 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.953721 4912 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.953785 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:26.953770254 +0000 UTC m=+1128.816772337 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "metrics-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.953728 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 08 21:37:25 crc kubenswrapper[4912]: E1208 21:37:25.954295 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:27.954279747 +0000 UTC m=+1129.817281830 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.140097 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt"] Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.151439 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v"] Dec 08 21:37:26 crc kubenswrapper[4912]: W1208 21:37:26.157960 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92f66d4c_5b7c_4bc0_820d_3319fa35a16b.slice/crio-3cf97feb703aa0fd0449d0cf35483cf15302f29e3dc727675f45a9369a907105 WatchSource:0}: Error finding container 3cf97feb703aa0fd0449d0cf35483cf15302f29e3dc727675f45a9369a907105: Status 404 returned error can't find the container with id 3cf97feb703aa0fd0449d0cf35483cf15302f29e3dc727675f45a9369a907105 Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.175464 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf"] Dec 08 21:37:26 crc kubenswrapper[4912]: W1208 21:37:26.179452 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7967d486_bea2_4064_8fbd_658052c9ac9f.slice/crio-f50d55beb352bfa5140771968931bc0b88f14e36cb775d02a83840086a2511a6 WatchSource:0}: Error finding container f50d55beb352bfa5140771968931bc0b88f14e36cb775d02a83840086a2511a6: Status 404 returned error can't find the container with id f50d55beb352bfa5140771968931bc0b88f14e36cb775d02a83840086a2511a6 Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.192813 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sr59v"] Dec 08 21:37:26 crc kubenswrapper[4912]: W1208 21:37:26.193436 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb17c499c_4624_462b_a672_cd23b4b63301.slice/crio-3b00cd5394c1df615328a1f13aac2eafa1ccb164628cf65adb77fbd6a8c78ab2 WatchSource:0}: Error finding container 3b00cd5394c1df615328a1f13aac2eafa1ccb164628cf65adb77fbd6a8c78ab2: Status 404 returned error can't find the container with id 3b00cd5394c1df615328a1f13aac2eafa1ccb164628cf65adb77fbd6a8c78ab2 Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.200213 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8"] Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.205207 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67"] Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.214469 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc"] Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.218329 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wklrj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-dqmmb_openstack-operators(b95889d5-9404-4bc5-867a-5bf1492855db): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.218464 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ztq75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-fmxw5_openstack-operators(ef95618f-ec2f-438b-ba4f-15cb5e057b6e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.220050 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4mmkk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-zxmcp_openstack-operators(9c67d467-660a-4bbc-a32c-b197db949502): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.220121 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp"] Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.222028 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z85zv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
manila-operator-controller-manager-5b5fd79c9c-xjlfc_openstack-operators(dd47e823-cffc-4455-ae03-a29000d733ab): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.229001 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z85zv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5b5fd79c9c-xjlfc_openstack-operators(dd47e823-cffc-4455-ae03-a29000d733ab): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.229162 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4mmkk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-zxmcp_openstack-operators(9c67d467-660a-4bbc-a32c-b197db949502): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.229187 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wklrj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-dqmmb_openstack-operators(b95889d5-9404-4bc5-867a-5bf1492855db): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.229302 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ztq75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-fmxw5_openstack-operators(ef95618f-ec2f-438b-ba4f-15cb5e057b6e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.229342 
4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cf4bn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.230316 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.230368 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" podUID="dd47e823-cffc-4455-ae03-a29000d733ab" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.230403 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to 
\"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.230435 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podUID="b95889d5-9404-4bc5-867a-5bf1492855db" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.231915 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cf4bn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.231987 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj"] Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.232739 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m 
DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l8wz2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-qgzdx_openstack-operators(243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.233274 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.233348 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-68ch4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-667bd8d554-5twmg_openstack-operators(e36bfc63-943e-49f5-ab0b-021474292dc7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.233430 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9bjmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-5s8dd_openstack-operators(fb8b700a-18a4-49ce-86cb-a38e2ff4cb58): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 08 21:37:26 crc 
kubenswrapper[4912]: E1208 21:37:26.233973 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" podUID="243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.236491 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9bjmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-5s8dd_openstack-operators(fb8b700a-18a4-49ce-86cb-a38e2ff4cb58): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.236991 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-68ch4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-667bd8d554-5twmg_openstack-operators(e36bfc63-943e-49f5-ab0b-021474292dc7): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.237813 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" podUID="fb8b700a-18a4-49ce-86cb-a38e2ff4cb58"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.238769 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7"
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.239674 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.247133 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.252939 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.279213 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.289072 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.299315 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg"]
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.461614 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.461858 4912 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.461913 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert podName:cbe56e26-bee2-4664-abc8-2d7ff76aa32e nodeName:}" failed. No retries permitted until 2025-12-08 21:37:28.461895937 +0000 UTC m=+1130.324898020 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f7p4sp" (UID: "cbe56e26-bee2-4664-abc8-2d7ff76aa32e") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.867619 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.867789 4912 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.867860 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:28.867840716 +0000 UTC m=+1130.730842799 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "webhook-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.940958 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" event={"ID":"92f66d4c-5b7c-4bc0-820d-3319fa35a16b","Type":"ContainerStarted","Data":"3cf97feb703aa0fd0449d0cf35483cf15302f29e3dc727675f45a9369a907105"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.944610 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" event={"ID":"9c67d467-660a-4bbc-a32c-b197db949502","Type":"ContainerStarted","Data":"ddb41f6b3b1950285dbd931121038c0d351be487eb1097909de805c99d238069"}
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.949865 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502"
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.955323 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" event={"ID":"b17c499c-4624-462b-a672-cd23b4b63301","Type":"ContainerStarted","Data":"3b00cd5394c1df615328a1f13aac2eafa1ccb164628cf65adb77fbd6a8c78ab2"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.964327 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" event={"ID":"b47b551b-07c5-4fc7-b6a9-76208870148f","Type":"ContainerStarted","Data":"92b649e6f1ba3bb29d87333916cdf7f25b1da3c99b45864b4b85fa982bf2d044"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.969561 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.969627 4912 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.969695 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:28.969680542 +0000 UTC m=+1130.832682625 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "metrics-server-cert" not found
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.977256 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" event={"ID":"7967d486-bea2-4064-8fbd-658052c9ac9f","Type":"ContainerStarted","Data":"f50d55beb352bfa5140771968931bc0b88f14e36cb775d02a83840086a2511a6"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.979965 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" event={"ID":"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33","Type":"ContainerStarted","Data":"8d248c6640098703cbdfceb3cc1a3c117054431fcbcff5827417b21fc5ad2626"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.982065 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" event={"ID":"ef95618f-ec2f-438b-ba4f-15cb5e057b6e","Type":"ContainerStarted","Data":"c3bc4bb73abf40d1f9f89ec9aeec3133622cc09296d9416cb1c281860ff82081"}
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.985870 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e"
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.986081 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" event={"ID":"0bf34a60-0aa4-4408-84f8-7848cf76086f","Type":"ContainerStarted","Data":"b08f295ea9e157a961b69a3a527657b540eb065de89fc77bd3ed9d79c6dbcb89"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.988764 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" event={"ID":"b95889d5-9404-4bc5-867a-5bf1492855db","Type":"ContainerStarted","Data":"89e43c7a3f475828699459f80391e8d52b0805ff8f0ea7f59954299ba9731010"}
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.990406 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" event={"ID":"a8da65eb-3b52-473d-93c2-da58da0d0cfc","Type":"ContainerStarted","Data":"1d9ad00187ec9203a06c5c09e1bc2e574a78a51a06c621f8d9dbb0ae4fc0484c"}
Dec 08 21:37:26 crc kubenswrapper[4912]: E1208 21:37:26.993109 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podUID="b95889d5-9404-4bc5-867a-5bf1492855db"
Dec 08 21:37:26 crc kubenswrapper[4912]: I1208 21:37:26.999083 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" event={"ID":"243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b","Type":"ContainerStarted","Data":"45c45579d1a235e099b14d6adec487e9da5216ea3ffb857364937207932cb865"}
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.002791 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" podUID="243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b"
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.007272 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"3f0d1361d8d14bcaebcfc0b7ee8debc72cfcfabb0a873048ce42e7b051a737eb"}
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.009511 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.010613 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" event={"ID":"0e45e244-50c1-4b0b-8e49-615f31b2cf2c","Type":"ContainerStarted","Data":"0a8382b8156c5d6de7e1c8db2f54dac320ec2e4a3bbb14cf4e4f6c73d2065d0e"}
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.017320 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" event={"ID":"4cfa9728-de47-4dfa-96d9-53b1c591e650","Type":"ContainerStarted","Data":"2d45fe25ff3a97bc7b542092a89d4afb1a126c0d9beef340383b62e5861afcc1"}
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.023252 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" event={"ID":"b4ebbd1e-bbd8-4290-8745-ed80c67abf66","Type":"ContainerStarted","Data":"947f402adc8ea89b0e771f25422fbeb11a4d5cda2a315b81f706b5cea629973d"}
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.025587 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" event={"ID":"dd47e823-cffc-4455-ae03-a29000d733ab","Type":"ContainerStarted","Data":"1cbed4a01a3aeb60161a2550ff4fc5200eaa113431aeefc8c65957305d092918"}
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.035841 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" podUID="dd47e823-cffc-4455-ae03-a29000d733ab"
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.035934 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" event={"ID":"e36bfc63-943e-49f5-ab0b-021474292dc7","Type":"ContainerStarted","Data":"34f95c27dd9944bcb29f49a292b68c050548671a63330662d2df3f329c390ce6"}
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.040190 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7"
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.054689 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" event={"ID":"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58","Type":"ContainerStarted","Data":"33359b0d650f162a86ae1c2186fe4bdfd67de15489841dd0c720f9645e59ea07"}
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.069277 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" podUID="fb8b700a-18a4-49ce-86cb-a38e2ff4cb58"
Dec 08 21:37:27 crc kubenswrapper[4912]: I1208 21:37:27.989547 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.989968 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:27 crc kubenswrapper[4912]: E1208 21:37:27.990018 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:31.990004725 +0000 UTC m=+1133.853006808 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.071543 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.071773 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.071874 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" podUID="dd47e823-cffc-4455-ae03-a29000d733ab"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.072215 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.072774 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" podUID="fb8b700a-18a4-49ce-86cb-a38e2ff4cb58"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.072846 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podUID="b95889d5-9404-4bc5-867a-5bf1492855db"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.073563 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.83:5001/openstack-k8s-operators/telemetry-operator:00aa5f531d8a387f8456969f47533ccbe3dc4576\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.074130 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" podUID="243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b"
Dec 08 21:37:28 crc kubenswrapper[4912]: I1208 21:37:28.503144 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.503313 4912 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.503352 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert podName:cbe56e26-bee2-4664-abc8-2d7ff76aa32e nodeName:}" failed. No retries permitted until 2025-12-08 21:37:32.503340144 +0000 UTC m=+1134.366342217 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f7p4sp" (UID: "cbe56e26-bee2-4664-abc8-2d7ff76aa32e") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:28 crc kubenswrapper[4912]: I1208 21:37:28.912064 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.912251 4912 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 08 21:37:28 crc kubenswrapper[4912]: E1208 21:37:28.912326 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:32.91230855 +0000 UTC m=+1134.775310643 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "webhook-server-cert" not found
Dec 08 21:37:29 crc kubenswrapper[4912]: I1208 21:37:29.014210 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:29 crc kubenswrapper[4912]: E1208 21:37:29.014461 4912 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 08 21:37:29 crc kubenswrapper[4912]: E1208 21:37:29.014523 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:33.014503856 +0000 UTC m=+1134.877505939 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "metrics-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: I1208 21:37:32.076099 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.076486 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.076641 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:40.076620984 +0000 UTC m=+1141.939623067 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: I1208 21:37:32.583585 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.583761 4912 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.583827 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert podName:cbe56e26-bee2-4664-abc8-2d7ff76aa32e nodeName:}" failed. No retries permitted until 2025-12-08 21:37:40.583807652 +0000 UTC m=+1142.446809735 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f7p4sp" (UID: "cbe56e26-bee2-4664-abc8-2d7ff76aa32e") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: I1208 21:37:32.991095 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.991262 4912 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 08 21:37:32 crc kubenswrapper[4912]: E1208 21:37:32.991346 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:40.991326892 +0000 UTC m=+1142.854328965 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "webhook-server-cert" not found
Dec 08 21:37:33 crc kubenswrapper[4912]: I1208 21:37:33.092674 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:33 crc kubenswrapper[4912]: E1208 21:37:33.092865 4912 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 08 21:37:33 crc kubenswrapper[4912]: E1208 21:37:33.092941 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs podName:99cab708-e8b8-4a28-8a36-f91964fc84e1 nodeName:}" failed. No retries permitted until 2025-12-08 21:37:41.092923072 +0000 UTC m=+1142.955925155 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs") pod "openstack-operator-controller-manager-54c84cffdd-hp5gh" (UID: "99cab708-e8b8-4a28-8a36-f91964fc84e1") : secret "metrics-server-cert" not found
Dec 08 21:37:39 crc kubenswrapper[4912]: I1208 21:37:39.173724 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" event={"ID":"0e45e244-50c1-4b0b-8e49-615f31b2cf2c","Type":"ContainerStarted","Data":"98a0e4712bd14b0e4c2788343a0a2035d7c6102eb7786329732e0c7a182209c9"}
Dec 08 21:37:39 crc kubenswrapper[4912]: I1208 21:37:39.177477 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" event={"ID":"ce1d46fe-d3fc-4386-a545-3e4513ca68c3","Type":"ContainerStarted","Data":"2b1cc527240d67fb23f6c8ba8084bf95242603a3d2590879449c59bc3eb6874c"}
Dec 08 21:37:39 crc kubenswrapper[4912]: E1208 21:37:39.425201 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bqh7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-wpc67_openstack-operators(7967d486-bea2-4064-8fbd-658052c9ac9f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 08 21:37:39 crc kubenswrapper[4912]: E1208 21:37:39.426347 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" podUID="7967d486-bea2-4064-8fbd-658052c9ac9f"
Dec 08 21:37:39 crc kubenswrapper[4912]: E1208 21:37:39.431615 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wklrj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-dqmmb_openstack-operators(b95889d5-9404-4bc5-867a-5bf1492855db): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 08 21:37:39 crc kubenswrapper[4912]: E1208 21:37:39.440426 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wklrj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-dqmmb_openstack-operators(b95889d5-9404-4bc5-867a-5bf1492855db): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 08 21:37:39 crc kubenswrapper[4912]: E1208 21:37:39.443203 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podUID="b95889d5-9404-4bc5-867a-5bf1492855db"
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.117336 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:37:40 crc kubenswrapper[4912]: E1208 21:37:40.117493 4912 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:40 crc kubenswrapper[4912]: E1208 21:37:40.117798 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert podName:27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c nodeName:}" failed. No retries permitted until 2025-12-08 21:37:56.117776579 +0000 UTC m=+1157.980778722 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert") pod "infra-operator-controller-manager-78d48bff9d-nxm5d" (UID: "27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c") : secret "infra-operator-webhook-server-cert" not found
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.197439 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" event={"ID":"d737be23-9586-4023-b01e-a9f7161b3b4c","Type":"ContainerStarted","Data":"7d0166790bf0dc1aff5347958cc18b0f343f98668ce6d2a23eda870f0ab56304"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.200230 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" event={"ID":"b4ebbd1e-bbd8-4290-8745-ed80c67abf66","Type":"ContainerStarted","Data":"9b873846bbb11210af289a4c203286619a72621add2a91d0d6adb336d2a2003e"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.201984 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" event={"ID":"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33","Type":"ContainerStarted","Data":"e9cd0f9141dc5864d054688033bf660826b7d7099b3d71ba0ccfa10cead5d0d3"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.207344 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" event={"ID":"b17c499c-4624-462b-a672-cd23b4b63301","Type":"ContainerStarted","Data":"7ac5ec87eb2ec4f3ae6c9e1d6cadab34c2bc3834c5390e9b37e5b35ce5bf5d4b"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.213375 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" event={"ID":"a8da65eb-3b52-473d-93c2-da58da0d0cfc","Type":"ContainerStarted","Data":"01932cafbd100a7fd4c3576b01f95856f661b079f15d5f7aae6604f26082be31"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.216442 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" event={"ID":"b47b551b-07c5-4fc7-b6a9-76208870148f","Type":"ContainerStarted","Data":"1e658d577af49ab960a8efb83db2ea10a919c7f5f69c16f65e34dd7b14f7405f"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.223804 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" event={"ID":"7967d486-bea2-4064-8fbd-658052c9ac9f","Type":"ContainerStarted","Data":"0919b584fabf802e47acdee902768d2bef0205665baed28da5ea6c58facccc7f"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.224436 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67"
Dec 08 21:37:40 crc kubenswrapper[4912]: E1208 21:37:40.225098 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" podUID="7967d486-bea2-4064-8fbd-658052c9ac9f"
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.233586 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" event={"ID":"0bf34a60-0aa4-4408-84f8-7848cf76086f","Type":"ContainerStarted","Data":"d745f350336b7cf607126d8f344b6a5646543dd9b8889a1a7ea5f96d257590f6"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.238745 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" event={"ID":"92f66d4c-5b7c-4bc0-820d-3319fa35a16b","Type":"ContainerStarted","Data":"360cd125e45888dada7477fcab3d994c32cd0663cb84282921c100adc389c6d1"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.248820 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" event={"ID":"4cfa9728-de47-4dfa-96d9-53b1c591e650","Type":"ContainerStarted","Data":"18968bab813cc766851e2cede3b3b7e79a6b98a24becb78e789f90c5a9fcaaee"}
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.624142 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.634716 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe56e26-bee2-4664-abc8-2d7ff76aa32e-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f7p4sp\" (UID: \"cbe56e26-bee2-4664-abc8-2d7ff76aa32e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.780405 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-km6cr"
Dec 08 21:37:40 crc kubenswrapper[4912]: I1208 21:37:40.784481 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.032288 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.055661 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-webhook-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.137872 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.142759 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99cab708-e8b8-4a28-8a36-f91964fc84e1-metrics-certs\") pod \"openstack-operator-controller-manager-54c84cffdd-hp5gh\" (UID: \"99cab708-e8b8-4a28-8a36-f91964fc84e1\") " pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:41 crc kubenswrapper[4912]: E1208 21:37:41.259119 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" podUID="7967d486-bea2-4064-8fbd-658052c9ac9f"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.348197 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-v4cw2"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.356808 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.497014 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp"]
Dec 08 21:37:41 crc kubenswrapper[4912]: I1208 21:37:41.852513 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"]
Dec 08 21:37:42 crc kubenswrapper[4912]: W1208 21:37:42.127071 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99cab708_e8b8_4a28_8a36_f91964fc84e1.slice/crio-5218ae847e314a74440c08e67506fe5cefa7ff10a9654748f2c63d1b5da1383a WatchSource:0}: Error finding container 5218ae847e314a74440c08e67506fe5cefa7ff10a9654748f2c63d1b5da1383a: Status 404 returned error can't find the container with id 5218ae847e314a74440c08e67506fe5cefa7ff10a9654748f2c63d1b5da1383a
Dec 08 21:37:42 crc kubenswrapper[4912]: I1208 21:37:42.274810 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" event={"ID":"cbe56e26-bee2-4664-abc8-2d7ff76aa32e","Type":"ContainerStarted","Data":"1a22494b17a225808d1d2f78b45f618eea0615ecd9952c0e525c6d38311b97ad"}
Dec 08 21:37:42 crc kubenswrapper[4912]: I1208 21:37:42.276064 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" event={"ID":"99cab708-e8b8-4a28-8a36-f91964fc84e1","Type":"ContainerStarted","Data":"5218ae847e314a74440c08e67506fe5cefa7ff10a9654748f2c63d1b5da1383a"}
Dec 08 21:37:43 crc kubenswrapper[4912]: I1208 21:37:43.290118 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" event={"ID":"99cab708-e8b8-4a28-8a36-f91964fc84e1","Type":"ContainerStarted","Data":"e7f6ca474fb4140613b7e485c382f9ace6ff55db7d528fb0ff43976e9164ebbf"}
Dec 08 21:37:43 crc kubenswrapper[4912]: I1208 21:37:43.290615 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:43 crc kubenswrapper[4912]: I1208 21:37:43.325246 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh" podStartSLOduration=19.325205553 podStartE2EDuration="19.325205553s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:37:43.31778427 +0000 UTC m=+1145.180786353" watchObservedRunningTime="2025-12-08 21:37:43.325205553 +0000 UTC m=+1145.188207636"
Dec 08 21:37:44 crc kubenswrapper[4912]: I1208 21:37:44.843907 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67"
Dec 08 21:37:44 crc kubenswrapper[4912]: E1208 21:37:44.846029 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" podUID="7967d486-bea2-4064-8fbd-658052c9ac9f"
Dec 08 21:37:51 crc kubenswrapper[4912]: I1208 21:37:51.363386 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54c84cffdd-hp5gh"
Dec 08 21:37:54 crc kubenswrapper[4912]: E1208 21:37:54.434916 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podUID="b95889d5-9404-4bc5-867a-5bf1492855db"
Dec 08 21:37:56 crc kubenswrapper[4912]: I1208 21:37:56.165202 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:37:56 crc kubenswrapper[4912]: I1208 21:37:56.171579 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c-cert\") pod \"infra-operator-controller-manager-78d48bff9d-nxm5d\" (UID: \"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:37:56 crc kubenswrapper[4912]: I1208 21:37:56.329896 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-92jhm"
Dec 08 21:37:56 crc kubenswrapper[4912]: I1208 21:37:56.337945 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.274322 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.275187 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4mmkk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-zxmcp_openstack-operators(9c67d467-660a-4bbc-a32c-b197db949502): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.281288 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.281460 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dplcb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-9xkjf_openstack-operators(dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.282674 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" podUID="dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.299733 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.300091 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hfqzw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-qsllt_openstack-operators(92f66d4c-5b7c-4bc0-820d-3319fa35a16b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.301268 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" podUID="92f66d4c-5b7c-4bc0-820d-3319fa35a16b"
Dec 08 21:38:01 crc kubenswrapper[4912]: I1208 21:38:01.429915 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf"
Dec 08 21:38:01 crc kubenswrapper[4912]: I1208 21:38:01.430027 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt"
Dec 08 21:38:01 crc kubenswrapper[4912]: I1208 21:38:01.432299 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt"
Dec 08 21:38:01 crc kubenswrapper[4912]: I1208 21:38:01.432564 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.434375 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" podUID="dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.434399 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" podUID="92f66d4c-5b7c-4bc0-820d-3319fa35a16b"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.880593 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.880792 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-68ch4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-667bd8d554-5twmg_openstack-operators(e36bfc63-943e-49f5-ab0b-021474292dc7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.918602 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.918795 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g4rgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-5697bb5779-54mxl_openstack-operators(ce1d46fe-d3fc-4386-a545-3e4513ca68c3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:01 crc kubenswrapper[4912]: E1208 21:38:01.919976 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" podUID="ce1d46fe-d3fc-4386-a545-3e4513ca68c3" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.393526 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:9d539fb6b72f91cfc6200bb91b7c6dbaeab17c7711342dd3a9549c66762a2d48" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.394259 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:9d539fb6b72f91cfc6200bb91b7c6dbaeab17c7711342dd3a9549c66762a2d48,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kxg2j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-84b575879f7p4sp_openstack-operators(cbe56e26-bee2-4664-abc8-2d7ff76aa32e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.401318 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.401954 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c8zpg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-hfdwp_openstack-operators(a8da65eb-3b52-473d-93c2-da58da0d0cfc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.403435 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" podUID="a8da65eb-3b52-473d-93c2-da58da0d0cfc" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.406116 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.406255 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cvdzw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-8cqw8_openstack-operators(4cfa9728-de47-4dfa-96d9-53b1c591e650): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc 
kubenswrapper[4912]: E1208 21:38:02.408321 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" podUID="4cfa9728-de47-4dfa-96d9-53b1c591e650" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.438390 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" podUID="4cfa9728-de47-4dfa-96d9-53b1c591e650" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.438409 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" podUID="dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.438436 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" podUID="a8da65eb-3b52-473d-93c2-da58da0d0cfc" Dec 08 21:38:02 crc kubenswrapper[4912]: I1208 21:38:02.438408 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.438896 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" podUID="92f66d4c-5b7c-4bc0-820d-3319fa35a16b" Dec 08 21:38:02 crc kubenswrapper[4912]: I1208 21:38:02.439206 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.439617 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" podUID="ce1d46fe-d3fc-4386-a545-3e4513ca68c3" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.936318 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.936826 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ztq75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-fmxw5_openstack-operators(ef95618f-ec2f-438b-ba4f-15cb5e057b6e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.942336 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.942518 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v6lp5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-tsz7z_openstack-operators(b4ebbd1e-bbd8-4290-8745-ed80c67abf66): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.956640 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" podUID="b4ebbd1e-bbd8-4290-8745-ed80c67abf66" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.968433 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 08 21:38:02 crc kubenswrapper[4912]: E1208 21:38:02.968592 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sbdkn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-sr59v_openstack-operators(b17c499c-4624-462b-a672-cd23b4b63301): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:38:02 crc 
kubenswrapper[4912]: E1208 21:38:02.969852 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" podUID="b17c499c-4624-462b-a672-cd23b4b63301" Dec 08 21:38:03 crc kubenswrapper[4912]: I1208 21:38:03.459938 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:38:03 crc kubenswrapper[4912]: I1208 21:38:03.463270 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" Dec 08 21:38:04 crc kubenswrapper[4912]: I1208 21:38:04.440723 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:38:04 crc kubenswrapper[4912]: I1208 21:38:04.441061 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" Dec 08 21:38:04 crc kubenswrapper[4912]: E1208 21:38:04.759097 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e" Dec 08 21:38:04 crc kubenswrapper[4912]: I1208 21:38:04.890309 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d"] Dec 08 21:38:04 crc kubenswrapper[4912]: W1208 21:38:04.951413 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27d2d4c3_9e6c_4c8c_91e9_715559a9bf4c.slice/crio-580279ec198a375368580b651792658f0f4784510b68b71849d8106ecb4122f0 WatchSource:0}: Error finding container 580279ec198a375368580b651792658f0f4784510b68b71849d8106ecb4122f0: Status 404 returned error can't find the container with id 580279ec198a375368580b651792658f0f4784510b68b71849d8106ecb4122f0 Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.229759 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.233057 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.322691 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.325425 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.482549 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" event={"ID":"ce1d46fe-d3fc-4386-a545-3e4513ca68c3","Type":"ContainerStarted","Data":"5979ce7e48f5a2afdce1ac4bcc743d685569b1f1f2a2fc561add49bc4754b89b"} Dec 08 
21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.490318 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" event={"ID":"b4ebbd1e-bbd8-4290-8745-ed80c67abf66","Type":"ContainerStarted","Data":"ae972ccbee3081006ab9c4b5a4b5eea107d33fc443d4cfcf9820227f2a68846e"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.499460 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" event={"ID":"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58","Type":"ContainerStarted","Data":"95e54db1cbc4897a82e1a2633532994eb3dd8dd543d8573f19bddef5718e20f9"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.510810 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" event={"ID":"243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b","Type":"ContainerStarted","Data":"b2228fc48e34287fc7f7e7f809e7a15e022c002bcc2746b2f00fd13323eaad51"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.513281 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"3156642001f58269f8a3ebad775c19bf127b72340dcaa7f55593e3f609abdf3b"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.514368 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" event={"ID":"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c","Type":"ContainerStarted","Data":"580279ec198a375368580b651792658f0f4784510b68b71849d8106ecb4122f0"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.516346 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" event={"ID":"b17c499c-4624-462b-a672-cd23b4b63301","Type":"ContainerStarted","Data":"c14f44878a9022fc812ad729e48b7cd5f8a6c5f3e209669d43b6ef0dc0c8de75"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.522206 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" event={"ID":"dd47e823-cffc-4455-ae03-a29000d733ab","Type":"ContainerStarted","Data":"76ec7aa9d1924ab9ddc042fbae1063dc11cbb89127f8cf25584bea0926183da7"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.523198 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.525695 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-54mxl" podStartSLOduration=28.169099597 podStartE2EDuration="41.525673603s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:25.419942342 +0000 UTC m=+1127.282944425" lastFinishedPulling="2025-12-08 21:37:38.776516348 +0000 UTC m=+1140.639518431" observedRunningTime="2025-12-08 21:38:05.514356098 +0000 UTC m=+1167.377358181" watchObservedRunningTime="2025-12-08 21:38:05.525673603 +0000 UTC m=+1167.388675686" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.536391 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" 
event={"ID":"b47b551b-07c5-4fc7-b6a9-76208870148f","Type":"ContainerStarted","Data":"66a478a318e5b3de87a79269504fa05b3e13180431c038d91c0b31e977230349"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.537553 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.542125 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.545280 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" event={"ID":"ef95618f-ec2f-438b-ba4f-15cb5e057b6e","Type":"ContainerStarted","Data":"ef3583f25a4855df863ab18d11ec2d334c712c8d8d926601fe11ede817c31607"} Dec 08 21:38:05 crc kubenswrapper[4912]: E1208 21:38:05.549244 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.553174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" event={"ID":"0e45e244-50c1-4b0b-8e49-615f31b2cf2c","Type":"ContainerStarted","Data":"1c4aff89c578d2b13def99e98a7cdff5d7462fec76fd701f2222337f52dc4f75"} Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.553939 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.563432 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" Dec 08 21:38:05 crc kubenswrapper[4912]: E1208 21:38:05.629119 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.634751 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgzdx" podStartSLOduration=3.326147805 podStartE2EDuration="41.634736387s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.232599659 +0000 UTC m=+1128.095601742" lastFinishedPulling="2025-12-08 21:38:04.541188241 +0000 UTC m=+1166.404190324" observedRunningTime="2025-12-08 21:38:05.578044383 +0000 UTC m=+1167.441046486" watchObservedRunningTime="2025-12-08 21:38:05.634736387 +0000 UTC m=+1167.497738470" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.653443 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-tsz7z" podStartSLOduration=28.786228483 podStartE2EDuration="41.653423432s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" 
firstStartedPulling="2025-12-08 21:37:25.910207522 +0000 UTC m=+1127.773209595" lastFinishedPulling="2025-12-08 21:37:38.777402461 +0000 UTC m=+1140.640404544" observedRunningTime="2025-12-08 21:38:05.640400364 +0000 UTC m=+1167.503402447" watchObservedRunningTime="2025-12-08 21:38:05.653423432 +0000 UTC m=+1167.516425515" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.685349 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-mpssj" podStartSLOduration=3.396163654 podStartE2EDuration="41.685329971s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.206947553 +0000 UTC m=+1128.069949646" lastFinishedPulling="2025-12-08 21:38:04.49611388 +0000 UTC m=+1166.359115963" observedRunningTime="2025-12-08 21:38:05.671691907 +0000 UTC m=+1167.534693990" watchObservedRunningTime="2025-12-08 21:38:05.685329971 +0000 UTC m=+1167.548332054" Dec 08 21:38:05 crc kubenswrapper[4912]: E1208 21:38:05.745747 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7" Dec 08 21:38:05 crc kubenswrapper[4912]: E1208 21:38:05.754757 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" podUID="cbe56e26-bee2-4664-abc8-2d7ff76aa32e" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.773427 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" podStartSLOduration=4.068259057 podStartE2EDuration="41.7734037s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.221948352 +0000 UTC m=+1128.084950435" lastFinishedPulling="2025-12-08 21:38:03.927092995 +0000 UTC m=+1165.790095078" observedRunningTime="2025-12-08 21:38:05.768595845 +0000 UTC m=+1167.631597928" watchObservedRunningTime="2025-12-08 21:38:05.7734037 +0000 UTC m=+1167.636405773" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.819956 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sr59v" podStartSLOduration=29.249106461 podStartE2EDuration="41.819938989s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.206581173 +0000 UTC m=+1128.069583246" lastFinishedPulling="2025-12-08 21:37:38.777413691 +0000 UTC m=+1140.640415774" observedRunningTime="2025-12-08 21:38:05.814919819 +0000 UTC m=+1167.677921902" watchObservedRunningTime="2025-12-08 21:38:05.819938989 +0000 UTC m=+1167.682941072" Dec 08 21:38:05 crc kubenswrapper[4912]: I1208 21:38:05.844837 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-r9g4v" podStartSLOduration=3.618305445 podStartE2EDuration="41.844820205s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.154054608 +0000 UTC m=+1128.017056681" lastFinishedPulling="2025-12-08 21:38:04.380569358 +0000 
UTC m=+1166.243571441" observedRunningTime="2025-12-08 21:38:05.843924582 +0000 UTC m=+1167.706926665" watchObservedRunningTime="2025-12-08 21:38:05.844820205 +0000 UTC m=+1167.707822288" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.585079 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" event={"ID":"d737be23-9586-4023-b01e-a9f7161b3b4c","Type":"ContainerStarted","Data":"8ca73c3ca379946ef5412b7955a632e06c41f6b81e9b40cee4f0a102d44b4ed7"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.585392 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.587110 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.595396 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" event={"ID":"0bf34a60-0aa4-4408-84f8-7848cf76086f","Type":"ContainerStarted","Data":"5c2fdd75ec63b96603d390791c2b18e24b2f576dcee3f38564fe4ec2c03f4f72"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.595617 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.598585 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.599452 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" event={"ID":"4cfa9728-de47-4dfa-96d9-53b1c591e650","Type":"ContainerStarted","Data":"b2c4f16d1d7079fcc017440df2232838f804eb16c3a18b3760d3a3d9c4487f71"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.602576 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" event={"ID":"9c67d467-660a-4bbc-a32c-b197db949502","Type":"ContainerStarted","Data":"7886fb0e9d35188ce93c7cdb5783c26defc38de4a0f6aaf8836689f64ba76e25"} Dec 08 21:38:06 crc kubenswrapper[4912]: E1208 21:38:06.603911 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.610244 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" event={"ID":"a8da65eb-3b52-473d-93c2-da58da0d0cfc","Type":"ContainerStarted","Data":"b0da60646f95d30c2bd95c5d4f184868343e6ec42cabcb9a55d76281ee612444"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.612777 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-4gfv4" podStartSLOduration=3.950209841 podStartE2EDuration="42.61275516s" 
podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:25.871384333 +0000 UTC m=+1127.734386416" lastFinishedPulling="2025-12-08 21:38:04.533929652 +0000 UTC m=+1166.396931735" observedRunningTime="2025-12-08 21:38:06.604961728 +0000 UTC m=+1168.467963811" watchObservedRunningTime="2025-12-08 21:38:06.61275516 +0000 UTC m=+1168.475757243" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.614698 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" event={"ID":"dd47e823-cffc-4455-ae03-a29000d733ab","Type":"ContainerStarted","Data":"b538ba6a594aef5ef2d79e081f1223b137faceb561e5051e96df0caebe4acaed"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.617793 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" event={"ID":"7967d486-bea2-4064-8fbd-658052c9ac9f","Type":"ContainerStarted","Data":"ff9d9765fb4d446c66443ef355cb622a587df9226e079336fba51949d9d6bb55"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.622283 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" event={"ID":"e36bfc63-943e-49f5-ab0b-021474292dc7","Type":"ContainerStarted","Data":"11b1cfe57f7e42119bd0262928766aa428384b5291c4b93e235f31156bdc80a2"} Dec 08 21:38:06 crc kubenswrapper[4912]: E1208 21:38:06.630243 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.631823 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" event={"ID":"fb8b700a-18a4-49ce-86cb-a38e2ff4cb58","Type":"ContainerStarted","Data":"a23d5a1e5a8da5e7f6a8040b6e19c5098c6b0d4619e48017fd6b64dfd499dcfc"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.632085 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.637907 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"8b09123d0134d98f4d6534cb59cb2a0514691a62f1533e9ba168009d604bd757"} Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.638395 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.642124 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" event={"ID":"cbe56e26-bee2-4664-abc8-2d7ff76aa32e","Type":"ContainerStarted","Data":"2d2198bcacbb30c15853244c148647a9eac92ebac3250973d2c3fd9cb518f572"} Dec 08 21:38:06 crc kubenswrapper[4912]: E1208 21:38:06.643342 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:9d539fb6b72f91cfc6200bb91b7c6dbaeab17c7711342dd3a9549c66762a2d48\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" podUID="cbe56e26-bee2-4664-abc8-2d7ff76aa32e" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.713777 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-plw7z" podStartSLOduration=4.235969536 podStartE2EDuration="42.713758585s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:25.909449392 +0000 UTC m=+1127.772451485" lastFinishedPulling="2025-12-08 21:38:04.387238451 +0000 UTC m=+1166.250240534" observedRunningTime="2025-12-08 21:38:06.706914127 +0000 UTC m=+1168.569916210" watchObservedRunningTime="2025-12-08 21:38:06.713758585 +0000 UTC m=+1168.576760668" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.737596 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8cqw8" podStartSLOduration=30.165298358 podStartE2EDuration="42.737569804s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.200379242 +0000 UTC m=+1128.063381325" lastFinishedPulling="2025-12-08 21:37:38.772650688 +0000 UTC m=+1140.635652771" observedRunningTime="2025-12-08 21:38:06.728493198 +0000 UTC m=+1168.591495281" watchObservedRunningTime="2025-12-08 21:38:06.737569804 +0000 UTC m=+1168.600571887" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.772453 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podStartSLOduration=21.583929956 podStartE2EDuration="42.77243749s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.229236692 +0000 UTC m=+1128.092238775" lastFinishedPulling="2025-12-08 21:37:47.417744236 +0000 UTC m=+1149.280746309" observedRunningTime="2025-12-08 21:38:06.767964963 +0000 UTC m=+1168.630967046" watchObservedRunningTime="2025-12-08 21:38:06.77243749 +0000 UTC m=+1168.635439573" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.806487 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-hfdwp" podStartSLOduration=29.799077382 podStartE2EDuration="42.806467794s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:25.923300582 +0000 UTC m=+1127.786302665" lastFinishedPulling="2025-12-08 21:37:38.930690994 +0000 UTC m=+1140.793693077" observedRunningTime="2025-12-08 21:38:06.804890563 +0000 UTC m=+1168.667892646" watchObservedRunningTime="2025-12-08 21:38:06.806467794 +0000 UTC m=+1168.669469877" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.840637 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" podStartSLOduration=5.144153184 podStartE2EDuration="42.840618811s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.233339738 +0000 UTC m=+1128.096341821" lastFinishedPulling="2025-12-08 21:38:03.929805365 +0000 UTC m=+1165.792807448" observedRunningTime="2025-12-08 21:38:06.835230691 +0000 UTC m=+1168.698232774" 
watchObservedRunningTime="2025-12-08 21:38:06.840618811 +0000 UTC m=+1168.703620894" Dec 08 21:38:06 crc kubenswrapper[4912]: I1208 21:38:06.875727 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-wpc67" podStartSLOduration=4.470238184 podStartE2EDuration="42.875708813s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.208172175 +0000 UTC m=+1128.071174248" lastFinishedPulling="2025-12-08 21:38:04.613642794 +0000 UTC m=+1166.476644877" observedRunningTime="2025-12-08 21:38:06.874958623 +0000 UTC m=+1168.737960706" watchObservedRunningTime="2025-12-08 21:38:06.875708813 +0000 UTC m=+1168.738710896" Dec 08 21:38:07 crc kubenswrapper[4912]: E1208 21:38:07.673311 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:9d539fb6b72f91cfc6200bb91b7c6dbaeab17c7711342dd3a9549c66762a2d48\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" podUID="cbe56e26-bee2-4664-abc8-2d7ff76aa32e" Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.676188 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" event={"ID":"b95889d5-9404-4bc5-867a-5bf1492855db","Type":"ContainerStarted","Data":"7b7d3b843ff112a35802e69d9d49d2336e27c125c9a6bc792f48e9c88e0d7ab1"} Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.676652 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" event={"ID":"b95889d5-9404-4bc5-867a-5bf1492855db","Type":"ContainerStarted","Data":"b54865319c6e38fc68861934ba531af1942f1cec526398d72d1ec0887e8b3cb6"} Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.676901 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.678708 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" event={"ID":"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c","Type":"ContainerStarted","Data":"654993d0807d7ea8f9fad1cdc87df6b9f26a6895e589ef53a54e7d8d698ecb67"} Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.678746 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" event={"ID":"27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c","Type":"ContainerStarted","Data":"2b0ecd3de36d2a61aa8f054bebe41c03e53cbfd8e5e466e1f4471e5735d9eee4"} Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.678871 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.698475 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" podStartSLOduration=3.243549079 podStartE2EDuration="45.698456581s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.218182905 +0000 UTC m=+1128.081184988" lastFinishedPulling="2025-12-08 21:38:08.673090407 +0000 UTC m=+1170.536092490" 
observedRunningTime="2025-12-08 21:38:09.694629271 +0000 UTC m=+1171.557631354" watchObservedRunningTime="2025-12-08 21:38:09.698456581 +0000 UTC m=+1171.561458664" Dec 08 21:38:09 crc kubenswrapper[4912]: I1208 21:38:09.715829 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" podStartSLOduration=42.000715077 podStartE2EDuration="45.715808202s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:38:04.954696156 +0000 UTC m=+1166.817698239" lastFinishedPulling="2025-12-08 21:38:08.669789291 +0000 UTC m=+1170.532791364" observedRunningTime="2025-12-08 21:38:09.713008199 +0000 UTC m=+1171.576010302" watchObservedRunningTime="2025-12-08 21:38:09.715808202 +0000 UTC m=+1171.578810295" Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.443895 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dqmmb" Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.614303 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-xjlfc" Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.716622 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" event={"ID":"92f66d4c-5b7c-4bc0-820d-3319fa35a16b","Type":"ContainerStarted","Data":"a814095d64401b66e75ef5729e7ab26b7cd9607f5563713dbc7f61917d9c6982"} Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.718716 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" event={"ID":"dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33","Type":"ContainerStarted","Data":"464ad436df8e04260830a774c651424dd53a009f2dce56b92fdc17b2cd3331d4"} Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.738771 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-qsllt" podStartSLOduration=38.095326147 podStartE2EDuration="50.738747811s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.177328213 +0000 UTC m=+1128.040330296" lastFinishedPulling="2025-12-08 21:37:38.820749877 +0000 UTC m=+1140.683751960" observedRunningTime="2025-12-08 21:38:14.734664825 +0000 UTC m=+1176.597666918" watchObservedRunningTime="2025-12-08 21:38:14.738747811 +0000 UTC m=+1176.601749894" Dec 08 21:38:14 crc kubenswrapper[4912]: I1208 21:38:14.760177 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-9xkjf" podStartSLOduration=38.148804076 podStartE2EDuration="50.760155587s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.207390654 +0000 UTC m=+1128.070392737" lastFinishedPulling="2025-12-08 21:37:38.818742165 +0000 UTC m=+1140.681744248" observedRunningTime="2025-12-08 21:38:14.754136261 +0000 UTC m=+1176.617138344" watchObservedRunningTime="2025-12-08 21:38:14.760155587 +0000 UTC m=+1176.623157670" Dec 08 21:38:15 crc kubenswrapper[4912]: I1208 21:38:15.217523 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-5s8dd" Dec 08 21:38:15 crc kubenswrapper[4912]: I1208 21:38:15.292764 4912 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:38:16 crc kubenswrapper[4912]: I1208 21:38:16.344952 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-nxm5d" Dec 08 21:38:18 crc kubenswrapper[4912]: E1208 21:38:18.434344 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podUID="9c67d467-660a-4bbc-a32c-b197db949502" Dec 08 21:38:18 crc kubenswrapper[4912]: E1208 21:38:18.434412 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6b3e0302608a2e70f9b5ae9167f6fbf59264f226d9db99d48f70466ab2f216b8\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podUID="e36bfc63-943e-49f5-ab0b-021474292dc7" Dec 08 21:38:18 crc kubenswrapper[4912]: E1208 21:38:18.434567 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podUID="ef95618f-ec2f-438b-ba4f-15cb5e057b6e" Dec 08 21:38:22 crc kubenswrapper[4912]: I1208 21:38:22.779753 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" event={"ID":"cbe56e26-bee2-4664-abc8-2d7ff76aa32e","Type":"ContainerStarted","Data":"4892185d6e432efcc6ef1ee94a4f238dc68d027ee9dd021276896267081525f8"} Dec 08 21:38:22 crc kubenswrapper[4912]: I1208 21:38:22.781100 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:38:22 crc kubenswrapper[4912]: I1208 21:38:22.811926 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" podStartSLOduration=18.462409994 podStartE2EDuration="58.811905488s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:41.52937726 +0000 UTC m=+1143.392379343" lastFinishedPulling="2025-12-08 21:38:21.878872754 +0000 UTC m=+1183.741874837" observedRunningTime="2025-12-08 21:38:22.80891407 +0000 UTC m=+1184.671916163" watchObservedRunningTime="2025-12-08 21:38:22.811905488 +0000 UTC m=+1184.674907571" Dec 08 21:38:30 crc kubenswrapper[4912]: I1208 21:38:30.792514 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f7p4sp" Dec 08 21:38:31 crc kubenswrapper[4912]: I1208 21:38:31.848050 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" 
event={"ID":"e36bfc63-943e-49f5-ab0b-021474292dc7","Type":"ContainerStarted","Data":"461eb13946b122f0ef7584502c348838f10e3f6f3f51770746299605a4fef0a5"} Dec 08 21:38:31 crc kubenswrapper[4912]: I1208 21:38:31.848482 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:38:31 crc kubenswrapper[4912]: I1208 21:38:31.871338 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" podStartSLOduration=3.273705393 podStartE2EDuration="1m7.871315133s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.233080722 +0000 UTC m=+1128.096082805" lastFinishedPulling="2025-12-08 21:38:30.830690462 +0000 UTC m=+1192.693692545" observedRunningTime="2025-12-08 21:38:31.864401213 +0000 UTC m=+1193.727403296" watchObservedRunningTime="2025-12-08 21:38:31.871315133 +0000 UTC m=+1193.734317216" Dec 08 21:38:32 crc kubenswrapper[4912]: I1208 21:38:32.965199 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:38:32 crc kubenswrapper[4912]: I1208 21:38:32.965279 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:38:35 crc kubenswrapper[4912]: I1208 21:38:35.334163 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-5twmg" Dec 08 21:38:39 crc kubenswrapper[4912]: I1208 21:38:39.913634 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" event={"ID":"9c67d467-660a-4bbc-a32c-b197db949502","Type":"ContainerStarted","Data":"144bc6b5cf39004cf5f7d56267c1718a7156de8dd4a8f533f53d5011f518ec34"} Dec 08 21:38:41 crc kubenswrapper[4912]: I1208 21:38:41.930549 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" event={"ID":"ef95618f-ec2f-438b-ba4f-15cb5e057b6e","Type":"ContainerStarted","Data":"291951c6c856a667e05922f5e20fe81bb80194d4b2495e708f2c61658da1d262"} Dec 08 21:38:42 crc kubenswrapper[4912]: I1208 21:38:42.937339 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:38:42 crc kubenswrapper[4912]: I1208 21:38:42.937406 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:38:42 crc kubenswrapper[4912]: I1208 21:38:42.960607 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" podStartSLOduration=6.885825602 podStartE2EDuration="1m18.960582752s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.219890509 +0000 UTC m=+1128.082892592" lastFinishedPulling="2025-12-08 
21:38:38.294647659 +0000 UTC m=+1200.157649742" observedRunningTime="2025-12-08 21:38:42.955654034 +0000 UTC m=+1204.818656117" watchObservedRunningTime="2025-12-08 21:38:42.960582752 +0000 UTC m=+1204.823584875" Dec 08 21:38:42 crc kubenswrapper[4912]: I1208 21:38:42.981740 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" podStartSLOduration=6.903632654 podStartE2EDuration="1m18.981721701s" podCreationTimestamp="2025-12-08 21:37:24 +0000 UTC" firstStartedPulling="2025-12-08 21:37:26.21838353 +0000 UTC m=+1128.081385613" lastFinishedPulling="2025-12-08 21:38:38.296472577 +0000 UTC m=+1200.159474660" observedRunningTime="2025-12-08 21:38:42.978557719 +0000 UTC m=+1204.841559842" watchObservedRunningTime="2025-12-08 21:38:42.981721701 +0000 UTC m=+1204.844723774" Dec 08 21:38:45 crc kubenswrapper[4912]: I1208 21:38:45.194081 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zxmcp" Dec 08 21:38:54 crc kubenswrapper[4912]: I1208 21:38:54.866831 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-fmxw5" Dec 08 21:39:02 crc kubenswrapper[4912]: I1208 21:39:02.965497 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:39:02 crc kubenswrapper[4912]: I1208 21:39:02.965971 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.712953 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"] Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.715011 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.718796 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-8xwcb" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.718924 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.719201 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.719384 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.736634 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"] Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.786429 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"] Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.791013 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.795857 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.797348 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"] Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.837588 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5td8\" (UniqueName: \"kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.837666 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.938776 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75tvb\" (UniqueName: \"kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.938905 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5td8\" (UniqueName: \"kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.938941 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.939002 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.939767 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.940476 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 
21:39:13 crc kubenswrapper[4912]: I1208 21:39:13.963642 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5td8\" (UniqueName: \"kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8\") pod \"dnsmasq-dns-675f4bcbfc-lv2qk\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.041393 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.041792 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75tvb\" (UniqueName: \"kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.041837 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.042380 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.042644 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.058612 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75tvb\" (UniqueName: \"kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb\") pod \"dnsmasq-dns-78dd6ddcc-mbt2t\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.062250 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.118523 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" Dec 08 21:39:14 crc kubenswrapper[4912]: I1208 21:39:14.700093 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"] Dec 08 21:39:15 crc kubenswrapper[4912]: W1208 21:39:15.169242 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod297c22aa_80bf_4fb8_a839_0e16c05c2bf3.slice/crio-3eef3768f07e93dca2053331c9b66191365327bf773df508d6b71f361f77c5be WatchSource:0}: Error finding container 3eef3768f07e93dca2053331c9b66191365327bf773df508d6b71f361f77c5be: Status 404 returned error can't find the container with id 3eef3768f07e93dca2053331c9b66191365327bf773df508d6b71f361f77c5be Dec 08 21:39:15 crc kubenswrapper[4912]: I1208 21:39:15.195181 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"] Dec 08 21:39:15 crc kubenswrapper[4912]: I1208 21:39:15.231557 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" event={"ID":"5466cd78-c75d-4ef9-b6fb-f25fdc202514","Type":"ContainerStarted","Data":"fcc0c4026d1c435cdbcca243d2b1732b2c12d9323b39bce531bc08ae746c04ef"} Dec 08 21:39:15 crc kubenswrapper[4912]: I1208 21:39:15.232534 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" event={"ID":"297c22aa-80bf-4fb8-a839-0e16c05c2bf3","Type":"ContainerStarted","Data":"3eef3768f07e93dca2053331c9b66191365327bf773df508d6b71f361f77c5be"} Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.751575 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"] Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.779578 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"] Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.781239 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.797795 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"] Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.812123 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.812230 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bszcd\" (UniqueName: \"kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.812264 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.917249 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.917393 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bszcd\" (UniqueName: \"kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.917425 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.918706 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.918809 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:16 crc kubenswrapper[4912]: I1208 21:39:16.944161 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bszcd\" (UniqueName: 
\"kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd\") pod \"dnsmasq-dns-666b6646f7-7p4lq\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") " pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.109558 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"] Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.123042 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.137354 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"] Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.138702 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.157822 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"] Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.232902 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.233084 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.233256 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6nj8\" (UniqueName: \"kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.334726 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6nj8\" (UniqueName: \"kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.334861 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.334907 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.336518 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.338101 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.394783 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6nj8\" (UniqueName: \"kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8\") pod \"dnsmasq-dns-57d769cc4f-xgtpp\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") " pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.479460 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.808301 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"] Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.944554 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.945940 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.949619 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.950252 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.950396 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.950601 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.950728 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-pgzmq" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.954194 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.957030 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 08 21:39:17 crc kubenswrapper[4912]: I1208 21:39:17.962931 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.181892 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.181943 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/621df657-49db-4768-8ad5-6676531990d4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.181973 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/621df657-49db-4768-8ad5-6676531990d4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182001 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182020 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182060 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c3f15499-3bf4-48da-aa76-046252838b3d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3f15499-3bf4-48da-aa76-046252838b3d\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182098 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182121 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d4z5\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-kube-api-access-5d4z5\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182150 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182180 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-config-data\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.182372 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.215012 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"] Dec 08 21:39:18 crc kubenswrapper[4912]: W1208 21:39:18.225731 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d48c15b_83a3_497c_bb82_7b000b9494f2.slice/crio-78cbac5f15dfb9f6517d52fd0e81d00a78e6828694606a7dce04127874e9530b WatchSource:0}: Error finding container 78cbac5f15dfb9f6517d52fd0e81d00a78e6828694606a7dce04127874e9530b: Status 404 returned error can't find the container with id 78cbac5f15dfb9f6517d52fd0e81d00a78e6828694606a7dce04127874e9530b Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.267719 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.268997 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.272353 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.272540 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.272730 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-kdbqv" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.272846 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.273032 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.273174 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.273592 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.283818 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284784 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284825 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/621df657-49db-4768-8ad5-6676531990d4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284852 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/621df657-49db-4768-8ad5-6676531990d4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284873 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284887 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284910 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c3f15499-3bf4-48da-aa76-046252838b3d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3f15499-3bf4-48da-aa76-046252838b3d\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284942 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284960 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d4z5\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-kube-api-access-5d4z5\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.284986 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.285011 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-config-data\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.285040 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.286630 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" 
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.289180 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.290151 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.291139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.292002 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/621df657-49db-4768-8ad5-6676531990d4-config-data\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.297532 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/621df657-49db-4768-8ad5-6676531990d4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0" Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.298438 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.298467 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c3f15499-3bf4-48da-aa76-046252838b3d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3f15499-3bf4-48da-aa76-046252838b3d\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/03e3fd6aad5adce2521c69aa5117e7ba693f8b26d422d100118babe7401a93ed/globalmount\"" pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.299323 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.305096 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.311823 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d4z5\" (UniqueName: \"kubernetes.io/projected/621df657-49db-4768-8ad5-6676531990d4-kube-api-access-5d4z5\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.316309 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/621df657-49db-4768-8ad5-6676531990d4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.323969 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" event={"ID":"000f4539-6aeb-4701-9964-d94c58cf9154","Type":"ContainerStarted","Data":"1d4c406dbf980bccce7961a8d24b3e22653e4635ea230abb6ed9c4cbbbd678bd"}
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.335057 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" event={"ID":"4d48c15b-83a3-497c-bb82-7b000b9494f2","Type":"ContainerStarted","Data":"78cbac5f15dfb9f6517d52fd0e81d00a78e6828694606a7dce04127874e9530b"}
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.382807 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c3f15499-3bf4-48da-aa76-046252838b3d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3f15499-3bf4-48da-aa76-046252838b3d\") pod \"rabbitmq-server-0\" (UID: \"621df657-49db-4768-8ad5-6676531990d4\") " pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387192 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f725cb12-94d8-42af-9930-d1d8a17ae9a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387246 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387282 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f725cb12-94d8-42af-9930-d1d8a17ae9a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387305 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387455 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387473 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8jgq\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-kube-api-access-q8jgq\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387494 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b3175427-7a5b-4063-9d91-96dfe5849881\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3175427-7a5b-4063-9d91-96dfe5849881\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387530 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387547 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387570 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.387639 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489009 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f725cb12-94d8-42af-9930-d1d8a17ae9a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489083 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489148 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489167 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8jgq\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-kube-api-access-q8jgq\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489191 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b3175427-7a5b-4063-9d91-96dfe5849881\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3175427-7a5b-4063-9d91-96dfe5849881\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489336 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489356 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489378 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489429 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489454 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f725cb12-94d8-42af-9930-d1d8a17ae9a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.489502 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.490185 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.492375 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.492402 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b3175427-7a5b-4063-9d91-96dfe5849881\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3175427-7a5b-4063-9d91-96dfe5849881\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e81f55e66458eb12311b1d49dfa512923b7375f043f75e22536521ff98714fce/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.495249 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.504974 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.505611 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.509522 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f725cb12-94d8-42af-9930-d1d8a17ae9a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.514773 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f725cb12-94d8-42af-9930-d1d8a17ae9a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.516577 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.518308 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f725cb12-94d8-42af-9930-d1d8a17ae9a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.521560 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.531915 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8jgq\" (UniqueName: \"kubernetes.io/projected/f725cb12-94d8-42af-9930-d1d8a17ae9a7-kube-api-access-q8jgq\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.553590 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b3175427-7a5b-4063-9d91-96dfe5849881\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3175427-7a5b-4063-9d91-96dfe5849881\") pod \"rabbitmq-cell1-server-0\" (UID: \"f725cb12-94d8-42af-9930-d1d8a17ae9a7\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.612853 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 08 21:39:18 crc kubenswrapper[4912]: I1208 21:39:18.678511 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.341161 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.414972 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 08 21:39:19 crc kubenswrapper[4912]: W1208 21:39:19.449282 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf725cb12_94d8_42af_9930_d1d8a17ae9a7.slice/crio-624998a704aa9136210f973d2c6d84250b05dd7b63edc1dd8427d0221d4d537b WatchSource:0}: Error finding container 624998a704aa9136210f973d2c6d84250b05dd7b63edc1dd8427d0221d4d537b: Status 404 returned error can't find the container with id 624998a704aa9136210f973d2c6d84250b05dd7b63edc1dd8427d0221d4d537b
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.664083 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.670322 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.672844 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.673657 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.673793 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-bv2sh"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.674118 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.684652 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.695634 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.837872 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.837982 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838035 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838123 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838159 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-30ea7c0a-7405-403f-b680-b12cf685a935\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30ea7c0a-7405-403f-b680-b12cf685a935\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838223 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-862dq\" (UniqueName: \"kubernetes.io/projected/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kube-api-access-862dq\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838258 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kolla-config\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.838301 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-default\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939656 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939715 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939739 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939755 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939777 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-30ea7c0a-7405-403f-b680-b12cf685a935\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30ea7c0a-7405-403f-b680-b12cf685a935\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939814 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-862dq\" (UniqueName: \"kubernetes.io/projected/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kube-api-access-862dq\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939833 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kolla-config\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.939857 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-default\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.940842 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-default\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.942141 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.942779 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kolla-config\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.943642 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.946647 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.947665 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.947696 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-30ea7c0a-7405-403f-b680-b12cf685a935\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30ea7c0a-7405-403f-b680-b12cf685a935\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0b15a261181c0f719ce79ab101b64a997df1703c1ccce975e2fe97484554194c/globalmount\"" pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.951185 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:19 crc kubenswrapper[4912]: I1208 21:39:19.957646 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-862dq\" (UniqueName: \"kubernetes.io/projected/914d6cd9-3b13-4f31-bed5-aaf5c553cea9-kube-api-access-862dq\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:20 crc kubenswrapper[4912]: I1208 21:39:20.020321 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-30ea7c0a-7405-403f-b680-b12cf685a935\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30ea7c0a-7405-403f-b680-b12cf685a935\") pod \"openstack-galera-0\" (UID: \"914d6cd9-3b13-4f31-bed5-aaf5c553cea9\") " pod="openstack/openstack-galera-0"
Dec 08 21:39:20 crc kubenswrapper[4912]: I1208 21:39:20.308577 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 08 21:39:20 crc kubenswrapper[4912]: I1208 21:39:20.418389 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f725cb12-94d8-42af-9930-d1d8a17ae9a7","Type":"ContainerStarted","Data":"624998a704aa9136210f973d2c6d84250b05dd7b63edc1dd8427d0221d4d537b"}
Dec 08 21:39:20 crc kubenswrapper[4912]: I1208 21:39:20.424125 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"621df657-49db-4768-8ad5-6676531990d4","Type":"ContainerStarted","Data":"265bb2aec1db58c624d10716fc5cd85c57b0b10e014f5febb8f7f52cb1f90794"}
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.028244 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.032436 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.039325 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.039510 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.039811 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.039911 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-mc68z"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.057719 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.122839 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.163615 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164091 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164141 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/35406345-ffad-4596-b323-22e156a4e481-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164202 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164253 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164293 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2l49\" (UniqueName: \"kubernetes.io/projected/35406345-ffad-4596-b323-22e156a4e481-kube-api-access-v2l49\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164588 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.164656 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265711 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265760 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265797 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265827 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265846 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/35406345-ffad-4596-b323-22e156a4e481-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265878 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265901 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.265919 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2l49\" (UniqueName: \"kubernetes.io/projected/35406345-ffad-4596-b323-22e156a4e481-kube-api-access-v2l49\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.267694 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/35406345-ffad-4596-b323-22e156a4e481-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.267967 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.268160 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.269304 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35406345-ffad-4596-b323-22e156a4e481-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.272088 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.282735 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35406345-ffad-4596-b323-22e156a4e481-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.286557 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.287834 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.292819 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.293034 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-l7fj8"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.293340 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.293765 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.293812 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b0edd9a8fceb55027c5ea9ecd50f65117060886ca2feca8f09a6d1c2072fdc2e/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.315515 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2l49\" (UniqueName: \"kubernetes.io/projected/35406345-ffad-4596-b323-22e156a4e481-kube-api-access-v2l49\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.360110 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.368796 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.368857 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-config-data\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.368885 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-kolla-config\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.368907 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trdjx\" (UniqueName: \"kubernetes.io/projected/0a8e59b7-40e5-4c2e-aead-21245661c02e-kube-api-access-trdjx\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.368980 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.393757 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32fa7c93-9d63-4679-983e-5f2f074e5c79\") pod \"openstack-cell1-galera-0\" (UID: \"35406345-ffad-4596-b323-22e156a4e481\") " pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.443234 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"914d6cd9-3b13-4f31-bed5-aaf5c553cea9","Type":"ContainerStarted","Data":"36f5d74563860938cb1cdf83f0b5f9e4b869da25b657766150bf5428de58c5af"}
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.472790 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.473033 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.473625 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-config-data\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.473708 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-kolla-config\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.474093 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trdjx\" (UniqueName: \"kubernetes.io/projected/0a8e59b7-40e5-4c2e-aead-21245661c02e-kube-api-access-trdjx\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.474912 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-config-data\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.480648 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0a8e59b7-40e5-4c2e-aead-21245661c02e-kolla-config\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.484768 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.487428 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a8e59b7-40e5-4c2e-aead-21245661c02e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.504822 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trdjx\" (UniqueName: \"kubernetes.io/projected/0a8e59b7-40e5-4c2e-aead-21245661c02e-kube-api-access-trdjx\") pod \"memcached-0\" (UID: \"0a8e59b7-40e5-4c2e-aead-21245661c02e\") " pod="openstack/memcached-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.688889 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 08 21:39:21 crc kubenswrapper[4912]: I1208 21:39:21.708766 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 08 21:39:22 crc kubenswrapper[4912]: I1208 21:39:22.336634 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 08 21:39:22 crc kubenswrapper[4912]: I1208 21:39:22.347663 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 08 21:39:22 crc kubenswrapper[4912]: I1208 21:39:22.454283 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"35406345-ffad-4596-b323-22e156a4e481","Type":"ContainerStarted","Data":"810ee84d11bed8e0cefdd96daf9620ff2fa4d2638e3b806b1baa5590f0080ca5"}
Dec 08 21:39:22 crc kubenswrapper[4912]: I1208 21:39:22.469364 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0a8e59b7-40e5-4c2e-aead-21245661c02e","Type":"ContainerStarted","Data":"5ba153e1e79db8208023f2c37ca56e78b74265161fb5003a107f0af92871fde4"}
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.339961 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-vwsgt"]
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.341284 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.343213 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.343451 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.344577 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5z64x"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.361028 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vwsgt"]
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.413412 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-dx49f"]
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.416689 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.455091 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dx49f"]
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505450 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-combined-ca-bundle\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505573 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5czls\" (UniqueName: \"kubernetes.io/projected/0456360f-7543-4af2-ad73-07d0332d3ce2-kube-api-access-5czls\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505625 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0456360f-7543-4af2-ad73-07d0332d3ce2-scripts\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505655 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-log-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505680 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-ovn-controller-tls-certs\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505704 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.505731 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.606986 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12a9d079-3756-4164-967c-be9bd7758724-scripts\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607065 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-log-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607083 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-ovn-controller-tls-certs\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607103 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-etc-ovs\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607122 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607180 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-combined-ca-bundle\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607222 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5czls\" (UniqueName: \"kubernetes.io/projected/0456360f-7543-4af2-ad73-07d0332d3ce2-kube-api-access-5czls\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607242 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7447\" (UniqueName: \"kubernetes.io/projected/12a9d079-3756-4164-967c-be9bd7758724-kube-api-access-w7447\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607260 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-log\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607314 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0456360f-7543-4af2-ad73-07d0332d3ce2-scripts\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607336 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607399 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-lib\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.607432 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-run\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.608004 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-log-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.608941 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run-ovn\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.614634 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-combined-ca-bundle\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.614634 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0456360f-7543-4af2-ad73-07d0332d3ce2-ovn-controller-tls-certs\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.616152 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0456360f-7543-4af2-ad73-07d0332d3ce2-var-run\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.617561 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0456360f-7543-4af2-ad73-07d0332d3ce2-scripts\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.636068 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5czls\" (UniqueName: \"kubernetes.io/projected/0456360f-7543-4af2-ad73-07d0332d3ce2-kube-api-access-5czls\") pod \"ovn-controller-vwsgt\" (UID: \"0456360f-7543-4af2-ad73-07d0332d3ce2\") " pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.670655 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709060 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-run\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709119 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12a9d079-3756-4164-967c-be9bd7758724-scripts\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709147 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-etc-ovs\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709174 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-run\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709236 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7447\" (UniqueName: \"kubernetes.io/projected/12a9d079-3756-4164-967c-be9bd7758724-kube-api-access-w7447\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709262 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-log\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709337 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-lib\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709388 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-etc-ovs\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709477 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-log\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.709584 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/12a9d079-3756-4164-967c-be9bd7758724-var-lib\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.711582 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12a9d079-3756-4164-967c-be9bd7758724-scripts\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:26 crc kubenswrapper[4912]: I1208 21:39:26.760751 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7447\" (UniqueName: \"kubernetes.io/projected/12a9d079-3756-4164-967c-be9bd7758724-kube-api-access-w7447\") pod \"ovn-controller-ovs-dx49f\" (UID: \"12a9d079-3756-4164-967c-be9bd7758724\") " pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:27 crc kubenswrapper[4912]: I1208 21:39:27.033516 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.441325 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.443167 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.450930 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.451201 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.451676 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.451814 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-mjmlp"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.452735 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.453745 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584314 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584364 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0"
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584614 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\")
pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584744 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584811 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584833 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4xjf\" (UniqueName: \"kubernetes.io/projected/7a5a0512-8f42-46d4-8806-d410f3270705-kube-api-access-z4xjf\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.584874 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-config\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.585095 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686109 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686462 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686526 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686576 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4xjf\" (UniqueName: \"kubernetes.io/projected/7a5a0512-8f42-46d4-8806-d410f3270705-kube-api-access-z4xjf\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686605 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-config\") pod \"ovsdbserver-nb-0\" (UID: 
\"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686654 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686705 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686727 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.686780 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.687778 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.688006 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.688771 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.689678 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a5a0512-8f42-46d4-8806-d410f3270705-config\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.690995 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.691322 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.691488 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-lpbzt" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.691703 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.692653 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.694755 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.695556 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.695585 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2225350451e697ce5643526949e7c98fa16299ad431ab410f759febedc473f06/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.695614 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5a0512-8f42-46d4-8806-d410f3270705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.696131 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.741966 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4xjf\" (UniqueName: \"kubernetes.io/projected/7a5a0512-8f42-46d4-8806-d410f3270705-kube-api-access-z4xjf\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.779587 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6f8016e-fb5f-4db3-ae2e-a3666a61eccc\") pod \"ovsdbserver-nb-0\" (UID: \"7a5a0512-8f42-46d4-8806-d410f3270705\") " pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.788865 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.788992 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789079 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789155 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bslbf\" (UniqueName: \"kubernetes.io/projected/fe2d86c8-7891-4421-b2c3-914ed8948d3c-kube-api-access-bslbf\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789237 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789311 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-97173abd-7a40-4307-b113-0ef368799b94\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97173abd-7a40-4307-b113-0ef368799b94\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789331 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.789389 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.890882 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-97173abd-7a40-4307-b113-0ef368799b94\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97173abd-7a40-4307-b113-0ef368799b94\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.890945 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891008 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891097 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891220 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891270 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891310 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bslbf\" (UniqueName: \"kubernetes.io/projected/fe2d86c8-7891-4421-b2c3-914ed8948d3c-kube-api-access-bslbf\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891372 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.891843 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.892232 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.892614 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe2d86c8-7891-4421-b2c3-914ed8948d3c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.893117 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.893145 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-97173abd-7a40-4307-b113-0ef368799b94\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97173abd-7a40-4307-b113-0ef368799b94\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/786d94f9fc66fa88e1b2d4b8bacee9d7937ecbb1d23fd57e3625a8677e376f25/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.895763 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.897548 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.899884 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe2d86c8-7891-4421-b2c3-914ed8948d3c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.913376 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bslbf\" (UniqueName: \"kubernetes.io/projected/fe2d86c8-7891-4421-b2c3-914ed8948d3c-kube-api-access-bslbf\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:30 crc kubenswrapper[4912]: I1208 21:39:30.934895 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-97173abd-7a40-4307-b113-0ef368799b94\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97173abd-7a40-4307-b113-0ef368799b94\") pod \"ovsdbserver-sb-0\" (UID: \"fe2d86c8-7891-4421-b2c3-914ed8948d3c\") " pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:31 crc kubenswrapper[4912]: I1208 21:39:31.077416 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 08 21:39:31 crc kubenswrapper[4912]: I1208 21:39:31.102749 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 08 21:39:32 crc kubenswrapper[4912]: I1208 21:39:32.965135 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:39:32 crc kubenswrapper[4912]: I1208 21:39:32.965550 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:39:32 crc kubenswrapper[4912]: I1208 21:39:32.965593 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:39:32 crc kubenswrapper[4912]: I1208 21:39:32.966310 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:39:32 crc kubenswrapper[4912]: I1208 21:39:32.966367 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8" gracePeriod=600 Dec 08 21:39:34 crc kubenswrapper[4912]: I1208 21:39:34.606846 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8" exitCode=0 Dec 08 21:39:34 crc kubenswrapper[4912]: I1208 21:39:34.607991 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8"} Dec 08 21:39:34 crc kubenswrapper[4912]: I1208 21:39:34.608060 4912 scope.go:117] "RemoveContainer" containerID="e561243c0a08b6a3c2b979f79371dccf1fa3fd8f2001bd69298ef149e0ac707f" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.512593 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.513343 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-862dq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(914d6cd9-3b13-4f31-bed5-aaf5c553cea9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.514593 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="914d6cd9-3b13-4f31-bed5-aaf5c553cea9" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.527246 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.527435 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2l49,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(35406345-ffad-4596-b323-22e156a4e481): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.528640 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="35406345-ffad-4596-b323-22e156a4e481" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.727516 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="914d6cd9-3b13-4f31-bed5-aaf5c553cea9" Dec 08 21:39:46 crc kubenswrapper[4912]: E1208 21:39:46.731758 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="35406345-ffad-4596-b323-22e156a4e481" Dec 08 21:39:47 crc kubenswrapper[4912]: E1208 21:39:47.242982 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 08 21:39:47 crc kubenswrapper[4912]: 
E1208 21:39:47.243233 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n8dh55bh684h548h556h56dh5dch554h568h688hb5h79h5f7h74h695h5d7h5fh567hc5hc8h554h57h95h594h8dh5bh654h5f6h7bh54bh5fchf9q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-trdjx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(0a8e59b7-40e5-4c2e-aead-21245661c02e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:47 crc kubenswrapper[4912]: E1208 21:39:47.244487 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc 
error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="0a8e59b7-40e5-4c2e-aead-21245661c02e" Dec 08 21:39:47 crc kubenswrapper[4912]: E1208 21:39:47.736506 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="0a8e59b7-40e5-4c2e-aead-21245661c02e" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.040575 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.041063 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6nj8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-xgtpp_openstack(4d48c15b-83a3-497c-bb82-7b000b9494f2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.042305 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2" Dec 
08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.050821 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.051106 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b5td8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-lv2qk_openstack(297c22aa-80bf-4fb8-a839-0e16c05c2bf3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.052382 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" podUID="297c22aa-80bf-4fb8-a839-0e16c05c2bf3" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.086303 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.086460 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces 
--listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-75tvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-mbt2t_openstack(5466cd78-c75d-4ef9-b6fb-f25fdc202514): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.087933 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" podUID="5466cd78-c75d-4ef9-b6fb-f25fdc202514" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.231275 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.231736 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bszcd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-7p4lq_openstack(000f4539-6aeb-4701-9964-d94c58cf9154): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.233069 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" podUID="000f4539-6aeb-4701-9964-d94c58cf9154"
Dec 08 21:39:48 crc kubenswrapper[4912]: I1208 21:39:48.650090 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vwsgt"]
Dec 08 21:39:48 crc kubenswrapper[4912]: I1208 21:39:48.737055 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 08 21:39:48 crc kubenswrapper[4912]: W1208 21:39:48.744249 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a5a0512_8f42_46d4_8806_d410f3270705.slice/crio-8a1f9e65e6d4f498e5881b53e5d8d8db3b4a559352ab6cbfe88f9915ccffabf8 WatchSource:0}: Error finding container 8a1f9e65e6d4f498e5881b53e5d8d8db3b4a559352ab6cbfe88f9915ccffabf8: Status 404 returned error can't find the container with id 8a1f9e65e6d4f498e5881b53e5d8d8db3b4a559352ab6cbfe88f9915ccffabf8
Dec 08 21:39:48 crc kubenswrapper[4912]: I1208 21:39:48.744471 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1"}
Dec 08 21:39:48 crc kubenswrapper[4912]: I1208 21:39:48.748194 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vwsgt" event={"ID":"0456360f-7543-4af2-ad73-07d0332d3ce2","Type":"ContainerStarted","Data":"7c45177d9510c47a66eacec66c1ba6165926c847e6fc47062e132f329da0f12e"}
Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.748711 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2"
Dec 08 21:39:48 crc kubenswrapper[4912]: E1208 21:39:48.748827 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" podUID="000f4539-6aeb-4701-9964-d94c58cf9154"
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.338055 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk"
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.346498 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t"
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.401367 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75tvb\" (UniqueName: \"kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb\") pod \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") "
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.401786 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5td8\" (UniqueName: \"kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8\") pod \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") "
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.401921 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc\") pod \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") "
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.401994 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config\") pod \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\" (UID: \"5466cd78-c75d-4ef9-b6fb-f25fdc202514\") "
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.402064 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config\") pod \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\" (UID: \"297c22aa-80bf-4fb8-a839-0e16c05c2bf3\") "
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.403419 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5466cd78-c75d-4ef9-b6fb-f25fdc202514" (UID: "5466cd78-c75d-4ef9-b6fb-f25fdc202514"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.403480 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config" (OuterVolumeSpecName: "config") pod "297c22aa-80bf-4fb8-a839-0e16c05c2bf3" (UID: "297c22aa-80bf-4fb8-a839-0e16c05c2bf3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.403792 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config" (OuterVolumeSpecName: "config") pod "5466cd78-c75d-4ef9-b6fb-f25fdc202514" (UID: "5466cd78-c75d-4ef9-b6fb-f25fdc202514"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.407464 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb" (OuterVolumeSpecName: "kube-api-access-75tvb") pod "5466cd78-c75d-4ef9-b6fb-f25fdc202514" (UID: "5466cd78-c75d-4ef9-b6fb-f25fdc202514"). InnerVolumeSpecName "kube-api-access-75tvb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.409302 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dx49f"]
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.417633 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8" (OuterVolumeSpecName: "kube-api-access-b5td8") pod "297c22aa-80bf-4fb8-a839-0e16c05c2bf3" (UID: "297c22aa-80bf-4fb8-a839-0e16c05c2bf3"). InnerVolumeSpecName "kube-api-access-b5td8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:39:49 crc kubenswrapper[4912]: W1208 21:39:49.420726 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12a9d079_3756_4164_967c_be9bd7758724.slice/crio-010289930dd29e2d87f51953336f74f1c5cb8722f4312787055b2a7d7ca927a9 WatchSource:0}: Error finding container 010289930dd29e2d87f51953336f74f1c5cb8722f4312787055b2a7d7ca927a9: Status 404 returned error can't find the container with id 010289930dd29e2d87f51953336f74f1c5cb8722f4312787055b2a7d7ca927a9
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.504443 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.504475 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.504484 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75tvb\" (UniqueName: \"kubernetes.io/projected/5466cd78-c75d-4ef9-b6fb-f25fdc202514-kube-api-access-75tvb\") on node \"crc\" DevicePath \"\""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.504496 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5td8\" (UniqueName: \"kubernetes.io/projected/297c22aa-80bf-4fb8-a839-0e16c05c2bf3-kube-api-access-b5td8\") on node \"crc\" DevicePath \"\""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.504505 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5466cd78-c75d-4ef9-b6fb-f25fdc202514-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.757392 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7a5a0512-8f42-46d4-8806-d410f3270705","Type":"ContainerStarted","Data":"8a1f9e65e6d4f498e5881b53e5d8d8db3b4a559352ab6cbfe88f9915ccffabf8"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.758842 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f725cb12-94d8-42af-9930-d1d8a17ae9a7","Type":"ContainerStarted","Data":"b7a7b8152abd3fe3d052ba480d146364636edc6a817cd5796935fc522140ae51"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.760485 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"621df657-49db-4768-8ad5-6676531990d4","Type":"ContainerStarted","Data":"52889e2d0bfeb7bfdb67e9cad760c502263cf08154372116b91e024fe55874ef"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.762370 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk"
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.762373 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-lv2qk" event={"ID":"297c22aa-80bf-4fb8-a839-0e16c05c2bf3","Type":"ContainerDied","Data":"3eef3768f07e93dca2053331c9b66191365327bf773df508d6b71f361f77c5be"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.763929 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t" event={"ID":"5466cd78-c75d-4ef9-b6fb-f25fdc202514","Type":"ContainerDied","Data":"fcc0c4026d1c435cdbcca243d2b1732b2c12d9323b39bce531bc08ae746c04ef"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.764120 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mbt2t"
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.766838 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dx49f" event={"ID":"12a9d079-3756-4164-967c-be9bd7758724","Type":"ContainerStarted","Data":"010289930dd29e2d87f51953336f74f1c5cb8722f4312787055b2a7d7ca927a9"}
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.862814 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"]
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.874772 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mbt2t"]
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.890062 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"]
Dec 08 21:39:49 crc kubenswrapper[4912]: I1208 21:39:49.898904 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-lv2qk"]
Dec 08 21:39:50 crc kubenswrapper[4912]: W1208 21:39:50.274701 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe2d86c8_7891_4421_b2c3_914ed8948d3c.slice/crio-97cf5d8555448267da340a561f758bfebab0b45b752c2e376fb5db20319922e8 WatchSource:0}: Error finding container 97cf5d8555448267da340a561f758bfebab0b45b752c2e376fb5db20319922e8: Status 404 returned error can't find the container with id 97cf5d8555448267da340a561f758bfebab0b45b752c2e376fb5db20319922e8
Dec 08 21:39:50 crc kubenswrapper[4912]: I1208 21:39:50.275317 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 08 21:39:50 crc kubenswrapper[4912]: I1208 21:39:50.438557 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="297c22aa-80bf-4fb8-a839-0e16c05c2bf3" path="/var/lib/kubelet/pods/297c22aa-80bf-4fb8-a839-0e16c05c2bf3/volumes"
Dec 08 21:39:50 crc kubenswrapper[4912]: I1208 21:39:50.438926 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5466cd78-c75d-4ef9-b6fb-f25fdc202514" path="/var/lib/kubelet/pods/5466cd78-c75d-4ef9-b6fb-f25fdc202514/volumes"
Dec 08 21:39:50 crc kubenswrapper[4912]: I1208 21:39:50.776830 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"fe2d86c8-7891-4421-b2c3-914ed8948d3c","Type":"ContainerStarted","Data":"97cf5d8555448267da340a561f758bfebab0b45b752c2e376fb5db20319922e8"}
Dec 08 21:39:54 crc kubenswrapper[4912]: I1208 21:39:54.820271 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vwsgt" event={"ID":"0456360f-7543-4af2-ad73-07d0332d3ce2","Type":"ContainerStarted","Data":"4cce70b1cf68069a0797fe8a473e3e874694393a7812b173c874e5806866f570"}
Dec 08 21:39:54 crc kubenswrapper[4912]: I1208 21:39:54.821010 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-vwsgt"
Dec 08 21:39:54 crc kubenswrapper[4912]: I1208 21:39:54.821829 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dx49f" event={"ID":"12a9d079-3756-4164-967c-be9bd7758724","Type":"ContainerStarted","Data":"9825abecbe2e5c978ce766fe61c0e9c8640ebc4dd003048d689039828bcac712"}
Dec 08 21:39:54 crc kubenswrapper[4912]: I1208 21:39:54.825795 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7a5a0512-8f42-46d4-8806-d410f3270705","Type":"ContainerStarted","Data":"b3771eb5880b38f128ec4e6f008be632ed7f540a58d689df845ca13f577ecebf"}
Dec 08 21:39:54 crc kubenswrapper[4912]: I1208 21:39:54.840979 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-vwsgt" podStartSLOduration=23.465562246 podStartE2EDuration="28.840963674s" podCreationTimestamp="2025-12-08 21:39:26 +0000 UTC" firstStartedPulling="2025-12-08 21:39:48.648067641 +0000 UTC m=+1270.511069734" lastFinishedPulling="2025-12-08 21:39:54.023469089 +0000 UTC m=+1275.886471162" observedRunningTime="2025-12-08 21:39:54.837334975 +0000 UTC m=+1276.700337058" watchObservedRunningTime="2025-12-08 21:39:54.840963674 +0000 UTC m=+1276.703965747"
Dec 08 21:39:55 crc kubenswrapper[4912]: I1208 21:39:55.833601 4912 generic.go:334] "Generic (PLEG): container finished" podID="12a9d079-3756-4164-967c-be9bd7758724" containerID="9825abecbe2e5c978ce766fe61c0e9c8640ebc4dd003048d689039828bcac712" exitCode=0
Dec 08 21:39:55 crc kubenswrapper[4912]: I1208 21:39:55.833648 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dx49f" event={"ID":"12a9d079-3756-4164-967c-be9bd7758724","Type":"ContainerDied","Data":"9825abecbe2e5c978ce766fe61c0e9c8640ebc4dd003048d689039828bcac712"}
Dec 08 21:39:56 crc kubenswrapper[4912]: I1208 21:39:56.844351 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"fe2d86c8-7891-4421-b2c3-914ed8948d3c","Type":"ContainerStarted","Data":"fe5c49cdc1f87496883000f13578388ce61e60af6f8eb9c607850bcc3714e847"}
Dec 08 21:39:56 crc kubenswrapper[4912]: I1208 21:39:56.849849 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dx49f" event={"ID":"12a9d079-3756-4164-967c-be9bd7758724","Type":"ContainerStarted","Data":"d52a02f4ac6bd631a414250a2ed30b0e6c5967a937fff6413b16f5cb772183c0"}
Dec 08 21:39:57 crc kubenswrapper[4912]: I1208 21:39:57.866750 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dx49f" event={"ID":"12a9d079-3756-4164-967c-be9bd7758724","Type":"ContainerStarted","Data":"5c6de1a4556867164a8d9797aaa8b0db1200d17fbdcbb5e32b2cf44fe1cb409f"}
Dec 08 21:39:57 crc kubenswrapper[4912]: I1208 21:39:57.867329 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:39:57 crc kubenswrapper[4912]: I1208 21:39:57.901804 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-dx49f" podStartSLOduration=27.322613974 podStartE2EDuration="31.90121023s" podCreationTimestamp="2025-12-08 21:39:26 +0000 UTC" firstStartedPulling="2025-12-08 21:39:49.422297369 +0000 UTC m=+1271.285299452" lastFinishedPulling="2025-12-08 21:39:54.000893625 +0000 UTC m=+1275.863895708" observedRunningTime="2025-12-08 21:39:57.893648045 +0000 UTC m=+1279.756650128" watchObservedRunningTime="2025-12-08 21:39:57.90121023 +0000 UTC m=+1279.764212313"
Dec 08 21:39:58 crc kubenswrapper[4912]: I1208 21:39:58.875571 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dx49f"
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.908216 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"914d6cd9-3b13-4f31-bed5-aaf5c553cea9","Type":"ContainerStarted","Data":"4455ea1a0747400d5c02aee3b3610d63615df5f1fbaa7cd529c4f2657a2df886"}
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.909762 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"35406345-ffad-4596-b323-22e156a4e481","Type":"ContainerStarted","Data":"8de306e269757c521448eaa5810f7c3db39bfbf646b9d2efb3c914ee35b9741e"}
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.912442 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0a8e59b7-40e5-4c2e-aead-21245661c02e","Type":"ContainerStarted","Data":"d3af933c6af06f62a2de52eef595183e793592bf6797ebbad138ba91cbae5b89"}
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.912813 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.924209 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7a5a0512-8f42-46d4-8806-d410f3270705","Type":"ContainerStarted","Data":"bfb7e7e94b7368bf05d4460add9b0bbc8a5b5aba041cf0050b8694cb725fddff"}
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.929170 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"fe2d86c8-7891-4421-b2c3-914ed8948d3c","Type":"ContainerStarted","Data":"f4d8d958bd24111f1aa85051781ee7d9b748e25472cfd3e62051930c40514dc6"}
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.952879 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.458218061 podStartE2EDuration="39.952861473s" podCreationTimestamp="2025-12-08 21:39:21 +0000 UTC" firstStartedPulling="2025-12-08 21:39:22.343579828 +0000 UTC m=+1244.206581911" lastFinishedPulling="2025-12-08 21:39:59.83822324 +0000 UTC m=+1281.701225323" observedRunningTime="2025-12-08 21:40:00.947517888 +0000 UTC m=+1282.810519971" watchObservedRunningTime="2025-12-08 21:40:00.952861473 +0000 UTC m=+1282.815863546"
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.977157 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=20.960992634 podStartE2EDuration="31.977140903s" podCreationTimestamp="2025-12-08 21:39:29 +0000 UTC" firstStartedPulling="2025-12-08 21:39:48.748173752 +0000 UTC m=+1270.611175835" lastFinishedPulling="2025-12-08 21:39:59.764322021 +0000 UTC m=+1281.627324104" observedRunningTime="2025-12-08 21:40:00.970167704 +0000 UTC m=+1282.833169797" watchObservedRunningTime="2025-12-08 21:40:00.977140903 +0000 UTC m=+1282.840142986"
Dec 08 21:40:00 crc kubenswrapper[4912]: I1208 21:40:00.999359 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=22.438772791 podStartE2EDuration="31.999341307s" podCreationTimestamp="2025-12-08 21:39:29 +0000 UTC" firstStartedPulling="2025-12-08 21:39:50.277656624 +0000 UTC m=+1272.140658707" lastFinishedPulling="2025-12-08 21:39:59.83822514 +0000 UTC m=+1281.701227223" observedRunningTime="2025-12-08 21:40:00.99137844 +0000 UTC m=+1282.854380523" watchObservedRunningTime="2025-12-08 21:40:00.999341307 +0000 UTC m=+1282.862343390"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.078588 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.078634 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.103244 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.103303 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.123613 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.154293 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.937717 4912 generic.go:334] "Generic (PLEG): container finished" podID="000f4539-6aeb-4701-9964-d94c58cf9154" containerID="bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3" exitCode=0
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.937885 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" event={"ID":"000f4539-6aeb-4701-9964-d94c58cf9154","Type":"ContainerDied","Data":"bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3"}
Dec 08 21:40:01 crc kubenswrapper[4912]: I1208 21:40:01.993760 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.008190 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.279909 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.311958 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.313382 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.317278 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.331793 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.385102 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-mbvfl"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.386154 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.389633 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.422998 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-mbvfl"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447377 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6htjg\" (UniqueName: \"kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447592 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4vwj\" (UniqueName: \"kubernetes.io/projected/55fdb9f6-a68c-4e39-9d14-394de4306337-kube-api-access-l4vwj\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447653 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovn-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447676 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovs-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447712 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447812 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-combined-ca-bundle\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.447941 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.448003 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55fdb9f6-a68c-4e39-9d14-394de4306337-config\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.448051 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.448089 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.554239 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.556023 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.560887 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55fdb9f6-a68c-4e39-9d14-394de4306337-config\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.560985 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561022 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561099 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6htjg\" (UniqueName: \"kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561213 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4vwj\" (UniqueName: \"kubernetes.io/projected/55fdb9f6-a68c-4e39-9d14-394de4306337-kube-api-access-l4vwj\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561253 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovn-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561283 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovs-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561314 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561395 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-combined-ca-bundle\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.561501 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.562628 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.563013 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.563235 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-d5pk6"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.564349 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55fdb9f6-a68c-4e39-9d14-394de4306337-config\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.564739 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.565537 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.565600 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.565803 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovn-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.565820 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/55fdb9f6-a68c-4e39-9d14-394de4306337-ovs-rundir\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.568660 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.575611 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.579531 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.579898 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55fdb9f6-a68c-4e39-9d14-394de4306337-combined-ca-bundle\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.584950 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.601836 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.606301 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.604473 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4vwj\" (UniqueName: \"kubernetes.io/projected/55fdb9f6-a68c-4e39-9d14-394de4306337-kube-api-access-l4vwj\") pod \"ovn-controller-metrics-mbvfl\" (UID: \"55fdb9f6-a68c-4e39-9d14-394de4306337\") " pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.613459 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.629449 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6htjg\" (UniqueName: \"kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg\") pod \"dnsmasq-dns-7f896c8c65-tm2qm\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.644853 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.662821 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663325 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663442 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-config\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663577 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663718 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663817 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.663908 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.664076 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhpf6\" (UniqueName: \"kubernetes.io/projected/1d0808e2-0fb2-45d9-a814-001c0f02f969-kube-api-access-nhpf6\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.664341 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-scripts\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.664486 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.664573 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.664692 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cd2l\" (UniqueName: \"kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.676987 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"]
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.728336 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-mbvfl"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766435 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhpf6\" (UniqueName: \"kubernetes.io/projected/1d0808e2-0fb2-45d9-a814-001c0f02f969-kube-api-access-nhpf6\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766740 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-scripts\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766764 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766786 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766821 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cd2l\" (UniqueName: \"kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766838 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766858 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766877 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-config\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766909 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766948 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766967 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.766981 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.768204 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.768258 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.768372 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.769466 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.769524 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-scripts\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.769642 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.770832 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0808e2-0fb2-45d9-a814-001c0f02f969-config\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.774544 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.781976 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.788005 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d0808e2-0fb2-45d9-a814-001c0f02f969-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.790784 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cd2l\" (UniqueName: \"kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l\") pod \"dnsmasq-dns-86db49b7ff-mb992\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.805497 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhpf6\" (UniqueName: \"kubernetes.io/projected/1d0808e2-0fb2-45d9-a814-001c0f02f969-kube-api-access-nhpf6\") pod \"ovn-northd-0\" (UID: \"1d0808e2-0fb2-45d9-a814-001c0f02f969\") " pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.947757 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.951970 4912 generic.go:334] "Generic (PLEG): container finished" podID="4d48c15b-83a3-497c-bb82-7b000b9494f2" containerID="dd39f074cb740f2bf310067a756bff3f729908891af50097db89cfd441067393" exitCode=0
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.952232 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" event={"ID":"4d48c15b-83a3-497c-bb82-7b000b9494f2","Type":"ContainerDied","Data":"dd39f074cb740f2bf310067a756bff3f729908891af50097db89cfd441067393"}
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.964249 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="dnsmasq-dns" containerID="cri-o://9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d" gracePeriod=10
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.965316 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" event={"ID":"000f4539-6aeb-4701-9964-d94c58cf9154","Type":"ContainerStarted","Data":"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"}
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.965999 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq"
Dec 08 21:40:02 crc kubenswrapper[4912]: I1208 21:40:02.969867 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.002616 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" podStartSLOduration=3.995251715 podStartE2EDuration="47.002595728s" podCreationTimestamp="2025-12-08 21:39:16 +0000 UTC" firstStartedPulling="2025-12-08 21:39:17.844446813 +0000 UTC m=+1239.707448896" lastFinishedPulling="2025-12-08 21:40:00.851790826 +0000 UTC m=+1282.714792909" observedRunningTime="2025-12-08 21:40:02.998314091 +0000 UTC m=+1284.861316194" watchObservedRunningTime="2025-12-08 21:40:03.002595728 +0000 UTC m=+1284.865597801"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.043447 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-mbvfl"]
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.181371 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"]
Dec 08 21:40:03 crc kubenswrapper[4912]: W1208 21:40:03.201280 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd7592d1_57a9_4de2_89bc_e40adaf6116d.slice/crio-8b0fe81f96c6dc7f0d554612a796b2a27017c2591f6d86b9348075a9dcebf2f2 WatchSource:0}: Error finding container 8b0fe81f96c6dc7f0d554612a796b2a27017c2591f6d86b9348075a9dcebf2f2: Status 404 returned error can't find the container with id 8b0fe81f96c6dc7f0d554612a796b2a27017c2591f6d86b9348075a9dcebf2f2
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.360981 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.393575 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config\") pod \"4d48c15b-83a3-497c-bb82-7b000b9494f2\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.393651 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6nj8\" (UniqueName: \"kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8\") pod \"4d48c15b-83a3-497c-bb82-7b000b9494f2\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.393710 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc\") pod \"4d48c15b-83a3-497c-bb82-7b000b9494f2\" (UID: \"4d48c15b-83a3-497c-bb82-7b000b9494f2\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.399813 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8" (OuterVolumeSpecName: "kube-api-access-x6nj8") pod "4d48c15b-83a3-497c-bb82-7b000b9494f2" (UID: "4d48c15b-83a3-497c-bb82-7b000b9494f2"). InnerVolumeSpecName "kube-api-access-x6nj8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.428430 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config" (OuterVolumeSpecName: "config") pod "4d48c15b-83a3-497c-bb82-7b000b9494f2" (UID: "4d48c15b-83a3-497c-bb82-7b000b9494f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.430704 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4d48c15b-83a3-497c-bb82-7b000b9494f2" (UID: "4d48c15b-83a3-497c-bb82-7b000b9494f2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.495866 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.495905 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6nj8\" (UniqueName: \"kubernetes.io/projected/4d48c15b-83a3-497c-bb82-7b000b9494f2-kube-api-access-x6nj8\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.495922 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d48c15b-83a3-497c-bb82-7b000b9494f2-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.568411 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.596578 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config\") pod \"000f4539-6aeb-4701-9964-d94c58cf9154\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.596721 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc\") pod \"000f4539-6aeb-4701-9964-d94c58cf9154\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.596824 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bszcd\" (UniqueName: \"kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd\") pod \"000f4539-6aeb-4701-9964-d94c58cf9154\" (UID: \"000f4539-6aeb-4701-9964-d94c58cf9154\") "
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.600880 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd" (OuterVolumeSpecName: "kube-api-access-bszcd") pod "000f4539-6aeb-4701-9964-d94c58cf9154" (UID: "000f4539-6aeb-4701-9964-d94c58cf9154"). InnerVolumeSpecName "kube-api-access-bszcd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: W1208 21:40:03.607373 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebd46e0e_6292_457b_a817_a9686944fc2e.slice/crio-190aaf0f1bedcc0be30afd5dcffae2f01a233c6b4880d5d1e56fb41e274e68c6 WatchSource:0}: Error finding container 190aaf0f1bedcc0be30afd5dcffae2f01a233c6b4880d5d1e56fb41e274e68c6: Status 404 returned error can't find the container with id 190aaf0f1bedcc0be30afd5dcffae2f01a233c6b4880d5d1e56fb41e274e68c6
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.612483 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"]
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.619492 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.668075 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "000f4539-6aeb-4701-9964-d94c58cf9154" (UID: "000f4539-6aeb-4701-9964-d94c58cf9154"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.681703 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config" (OuterVolumeSpecName: "config") pod "000f4539-6aeb-4701-9964-d94c58cf9154" (UID: "000f4539-6aeb-4701-9964-d94c58cf9154"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.699185 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.699220 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bszcd\" (UniqueName: \"kubernetes.io/projected/000f4539-6aeb-4701-9964-d94c58cf9154-kube-api-access-bszcd\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.699230 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000f4539-6aeb-4701-9964-d94c58cf9154-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.973455 4912 generic.go:334] "Generic (PLEG): container finished" podID="000f4539-6aeb-4701-9964-d94c58cf9154" containerID="9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d" exitCode=0
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.973539 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" event={"ID":"000f4539-6aeb-4701-9964-d94c58cf9154","Type":"ContainerDied","Data":"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.973579 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.973843 4912 scope.go:117] "RemoveContainer" containerID="9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.973827 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7p4lq" event={"ID":"000f4539-6aeb-4701-9964-d94c58cf9154","Type":"ContainerDied","Data":"1d4c406dbf980bccce7961a8d24b3e22653e4635ea230abb6ed9c4cbbbd678bd"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.976127 4912 generic.go:334] "Generic (PLEG): container finished" podID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerID="88150e1d4ae584b33c4d6ee57c2ffefed7006419ad3f5b5101261b39e135a0d2" exitCode=0
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.976228 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" event={"ID":"ebd46e0e-6292-457b-a817-a9686944fc2e","Type":"ContainerDied","Data":"88150e1d4ae584b33c4d6ee57c2ffefed7006419ad3f5b5101261b39e135a0d2"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.976296 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" event={"ID":"ebd46e0e-6292-457b-a817-a9686944fc2e","Type":"ContainerStarted","Data":"190aaf0f1bedcc0be30afd5dcffae2f01a233c6b4880d5d1e56fb41e274e68c6"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.981987 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-mbvfl" event={"ID":"55fdb9f6-a68c-4e39-9d14-394de4306337","Type":"ContainerStarted","Data":"a1e81374e398a214580d61f9fbdf7e525e995e0c0792f769deb06a517f05958b"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.982053 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-mbvfl" event={"ID":"55fdb9f6-a68c-4e39-9d14-394de4306337","Type":"ContainerStarted","Data":"312c8346e228bb58ffe7335d0479bcc5f02791a4072d67c0bc687720c01b6610"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.986249 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1d0808e2-0fb2-45d9-a814-001c0f02f969","Type":"ContainerStarted","Data":"e5f37d5a1c6eeefc84e74e3ddb040b430d7b7852c9d854ace87a515aeab84319"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.988099 4912 generic.go:334] "Generic (PLEG): container finished" podID="35406345-ffad-4596-b323-22e156a4e481" containerID="8de306e269757c521448eaa5810f7c3db39bfbf646b9d2efb3c914ee35b9741e" exitCode=0
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.988164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"35406345-ffad-4596-b323-22e156a4e481","Type":"ContainerDied","Data":"8de306e269757c521448eaa5810f7c3db39bfbf646b9d2efb3c914ee35b9741e"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.993546 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp" event={"ID":"4d48c15b-83a3-497c-bb82-7b000b9494f2","Type":"ContainerDied","Data":"78cbac5f15dfb9f6517d52fd0e81d00a78e6828694606a7dce04127874e9530b"}
Dec 08 21:40:03 crc kubenswrapper[4912]: I1208 21:40:03.993731 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xgtpp"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.007056 4912 generic.go:334] "Generic (PLEG): container finished" podID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerID="fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be" exitCode=0
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.007195 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" event={"ID":"cd7592d1-57a9-4de2-89bc-e40adaf6116d","Type":"ContainerDied","Data":"fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be"}
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.022236 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" event={"ID":"cd7592d1-57a9-4de2-89bc-e40adaf6116d","Type":"ContainerStarted","Data":"8b0fe81f96c6dc7f0d554612a796b2a27017c2591f6d86b9348075a9dcebf2f2"}
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.050695 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-mbvfl" podStartSLOduration=2.050678331 podStartE2EDuration="2.050678331s" podCreationTimestamp="2025-12-08 21:40:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:04.045158991 +0000 UTC m=+1285.908161084" watchObservedRunningTime="2025-12-08 21:40:04.050678331 +0000 UTC m=+1285.913680414"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.236867 4912 scope.go:117] "RemoveContainer" containerID="bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.237350 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"]
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.260489 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7p4lq"]
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.291891 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"]
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.299746 4912 scope.go:117] "RemoveContainer" containerID="9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"
Dec 08 21:40:04 crc kubenswrapper[4912]: E1208 21:40:04.301786 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d\": container with ID starting with 9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d not found: ID does not exist" containerID="9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.301844 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d"} err="failed to get container status \"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d\": rpc error: code = NotFound desc = could not find container \"9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d\": container with ID starting with 9beb05b95b2d3b321a44f14494bb55d85eb4861a1f5026dc72d1bbe3a370fd9d not found: ID does not exist"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.301879 4912 scope.go:117] "RemoveContainer" containerID="bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.307574 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xgtpp"]
Dec 08 21:40:04 crc kubenswrapper[4912]: E1208 21:40:04.311070 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3\": container with ID starting with bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3 not found: ID does not exist" containerID="bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.311131 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3"} err="failed to get container status \"bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3\": rpc error: code = NotFound desc = could not find container \"bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3\": container with ID starting with bd63ec226e82cefa2678f8fc32c0d14acdc122423d4587600e7a2826d06918d3 not found: ID does not exist"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.311168 4912 scope.go:117] "RemoveContainer" containerID="dd39f074cb740f2bf310067a756bff3f729908891af50097db89cfd441067393"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.444149 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" path="/var/lib/kubelet/pods/000f4539-6aeb-4701-9964-d94c58cf9154/volumes"
Dec 08 21:40:04 crc kubenswrapper[4912]: I1208 21:40:04.445759 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2" path="/var/lib/kubelet/pods/4d48c15b-83a3-497c-bb82-7b000b9494f2/volumes"
Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.015756 4912 generic.go:334] "Generic (PLEG): container finished"
podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="3156642001f58269f8a3ebad775c19bf127b72340dcaa7f55593e3f609abdf3b" exitCode=1 Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.015811 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"3156642001f58269f8a3ebad775c19bf127b72340dcaa7f55593e3f609abdf3b"} Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.016324 4912 scope.go:117] "RemoveContainer" containerID="3156642001f58269f8a3ebad775c19bf127b72340dcaa7f55593e3f609abdf3b" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.019500 4912 generic.go:334] "Generic (PLEG): container finished" podID="914d6cd9-3b13-4f31-bed5-aaf5c553cea9" containerID="4455ea1a0747400d5c02aee3b3610d63615df5f1fbaa7cd529c4f2657a2df886" exitCode=0 Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.019576 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"914d6cd9-3b13-4f31-bed5-aaf5c553cea9","Type":"ContainerDied","Data":"4455ea1a0747400d5c02aee3b3610d63615df5f1fbaa7cd529c4f2657a2df886"} Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.022383 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"35406345-ffad-4596-b323-22e156a4e481","Type":"ContainerStarted","Data":"bf0b8dc3cab1bb41fa6e296797a2ebab4828ebd4ccc68b71aaa4d722e8707113"} Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.026410 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" event={"ID":"cd7592d1-57a9-4de2-89bc-e40adaf6116d","Type":"ContainerStarted","Data":"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274"} Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.026543 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.028488 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" event={"ID":"ebd46e0e-6292-457b-a817-a9686944fc2e","Type":"ContainerStarted","Data":"07f4ee88a152e907c7fbbe874a321d6f4dfd31f8776f9c24f043f08f9713ab2b"} Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.028517 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.072561 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.598355976 podStartE2EDuration="46.072545792s" podCreationTimestamp="2025-12-08 21:39:19 +0000 UTC" firstStartedPulling="2025-12-08 21:39:22.364860977 +0000 UTC m=+1244.227863060" lastFinishedPulling="2025-12-08 21:39:59.839050793 +0000 UTC m=+1281.702052876" observedRunningTime="2025-12-08 21:40:05.067903816 +0000 UTC m=+1286.930905899" watchObservedRunningTime="2025-12-08 21:40:05.072545792 +0000 UTC m=+1286.935547875" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.090049 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" podStartSLOduration=3.090020327 podStartE2EDuration="3.090020327s" podCreationTimestamp="2025-12-08 21:40:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-08 21:40:05.087851519 +0000 UTC m=+1286.950853602" watchObservedRunningTime="2025-12-08 21:40:05.090020327 +0000 UTC m=+1286.953022410" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.109942 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" podStartSLOduration=3.109922549 podStartE2EDuration="3.109922549s" podCreationTimestamp="2025-12-08 21:40:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:05.103813563 +0000 UTC m=+1286.966815656" watchObservedRunningTime="2025-12-08 21:40:05.109922549 +0000 UTC m=+1286.972924632" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.250086 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:40:05 crc kubenswrapper[4912]: I1208 21:40:05.250431 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.037394 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"914d6cd9-3b13-4f31-bed5-aaf5c553cea9","Type":"ContainerStarted","Data":"77335abaac15e11ff6afa66a66830c50e0efac7318ebc3a0e819dfd5c20710e9"} Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.040359 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f"} Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.040481 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.043182 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1d0808e2-0fb2-45d9-a814-001c0f02f969","Type":"ContainerStarted","Data":"e06a1cb1c685e121959f697a8a83a3d7e9ce3970d4211e1c42307bff5fdc58fe"} Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.043219 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1d0808e2-0fb2-45d9-a814-001c0f02f969","Type":"ContainerStarted","Data":"ff36fc53a6714d740d1cc5018029e8ec2e099ecda93c884f95bebaaafbe5d3f6"} Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.062626 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371988.792168 podStartE2EDuration="48.062608949s" podCreationTimestamp="2025-12-08 21:39:18 +0000 UTC" firstStartedPulling="2025-12-08 21:39:21.1599579 +0000 UTC m=+1243.022959983" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:06.056529324 +0000 UTC m=+1287.919531417" watchObservedRunningTime="2025-12-08 21:40:06.062608949 +0000 UTC m=+1287.925611032" Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.102972 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.633814594 podStartE2EDuration="4.102957105s" podCreationTimestamp="2025-12-08 21:40:02 +0000 UTC" firstStartedPulling="2025-12-08 
21:40:03.610736621 +0000 UTC m=+1285.473738704" lastFinishedPulling="2025-12-08 21:40:05.079879132 +0000 UTC m=+1286.942881215" observedRunningTime="2025-12-08 21:40:06.099270455 +0000 UTC m=+1287.962272538" watchObservedRunningTime="2025-12-08 21:40:06.102957105 +0000 UTC m=+1287.965959188" Dec 08 21:40:06 crc kubenswrapper[4912]: I1208 21:40:06.709734 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 08 21:40:07 crc kubenswrapper[4912]: I1208 21:40:07.050344 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 08 21:40:10 crc kubenswrapper[4912]: I1208 21:40:10.309126 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 08 21:40:10 crc kubenswrapper[4912]: I1208 21:40:10.309691 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 08 21:40:10 crc kubenswrapper[4912]: I1208 21:40:10.378821 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.157245 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573103 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b6dd-account-create-update-wvsx7"] Dec 08 21:40:11 crc kubenswrapper[4912]: E1208 21:40:11.573493 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="init" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573508 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="init" Dec 08 21:40:11 crc kubenswrapper[4912]: E1208 21:40:11.573541 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="dnsmasq-dns" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573548 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="dnsmasq-dns" Dec 08 21:40:11 crc kubenswrapper[4912]: E1208 21:40:11.573560 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2" containerName="init" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573568 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2" containerName="init" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573746 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d48c15b-83a3-497c-bb82-7b000b9494f2" containerName="init" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.573773 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="000f4539-6aeb-4701-9964-d94c58cf9154" containerName="dnsmasq-dns" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.574455 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.576575 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.587627 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b6dd-account-create-update-wvsx7"] Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.629828 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-mwbl8"] Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.631405 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.631479 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zk6b\" (UniqueName: \"kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.631546 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.638370 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mwbl8"] Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.690713 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.690765 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.733709 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.734074 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp9vp\" (UniqueName: \"kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.734374 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.734438 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.734442 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zk6b\" (UniqueName: \"kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.752189 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zk6b\" (UniqueName: \"kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b\") pod \"placement-b6dd-account-create-update-wvsx7\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.766229 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.835975 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.836112 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp9vp\" (UniqueName: \"kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.836865 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.853526 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp9vp\" (UniqueName: \"kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp\") pod \"placement-db-create-mwbl8\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.894594 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:11 crc kubenswrapper[4912]: I1208 21:40:11.952468 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:12 crc kubenswrapper[4912]: I1208 21:40:12.231406 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 08 21:40:12 crc kubenswrapper[4912]: I1208 21:40:12.409579 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b6dd-account-create-update-wvsx7"] Dec 08 21:40:12 crc kubenswrapper[4912]: W1208 21:40:12.416276 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31b6f852_ce10_4c0d_8a14_897b647ae732.slice/crio-ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd WatchSource:0}: Error finding container ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd: Status 404 returned error can't find the container with id ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd Dec 08 21:40:12 crc kubenswrapper[4912]: I1208 21:40:12.522231 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mwbl8"] Dec 08 21:40:12 crc kubenswrapper[4912]: I1208 21:40:12.646858 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" Dec 08 21:40:12 crc kubenswrapper[4912]: I1208 21:40:12.971227 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.023665 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"] Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.128621 4912 generic.go:334] "Generic (PLEG): container finished" podID="e452832b-2c91-42f8-be68-d0a6b1ec4b1a" containerID="d959fc509d3b200a40f26432ec7b5ffd6950cbbd7219867602e2d2b07cb21709" exitCode=0 Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.128713 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mwbl8" event={"ID":"e452832b-2c91-42f8-be68-d0a6b1ec4b1a","Type":"ContainerDied","Data":"d959fc509d3b200a40f26432ec7b5ffd6950cbbd7219867602e2d2b07cb21709"} Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.128741 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mwbl8" event={"ID":"e452832b-2c91-42f8-be68-d0a6b1ec4b1a","Type":"ContainerStarted","Data":"9e9e22a48b12b9cf1969a12c54b6e946ee7030bd3b664f0e394a174544beb073"} Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.131164 4912 generic.go:334] "Generic (PLEG): container finished" podID="31b6f852-ce10-4c0d-8a14-897b647ae732" containerID="9195ce33b557d0d18d9a557c17087e5a4028c7218a546c9ff1a10df54a5187e0" exitCode=0 Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.131268 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b6dd-account-create-update-wvsx7" event={"ID":"31b6f852-ce10-4c0d-8a14-897b647ae732","Type":"ContainerDied","Data":"9195ce33b557d0d18d9a557c17087e5a4028c7218a546c9ff1a10df54a5187e0"} Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.131308 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b6dd-account-create-update-wvsx7" event={"ID":"31b6f852-ce10-4c0d-8a14-897b647ae732","Type":"ContainerStarted","Data":"ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd"} Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.131345 4912 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="dnsmasq-dns" containerID="cri-o://2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274" gracePeriod=10 Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.587864 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.715329 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"] Dec 08 21:40:13 crc kubenswrapper[4912]: E1208 21:40:13.715682 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="dnsmasq-dns" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.715693 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="dnsmasq-dns" Dec 08 21:40:13 crc kubenswrapper[4912]: E1208 21:40:13.715709 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="init" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.715716 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="init" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.715914 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerName="dnsmasq-dns" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.716838 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.748488 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"] Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.782670 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6htjg\" (UniqueName: \"kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg\") pod \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.782932 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc\") pod \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.782981 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config\") pod \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.783006 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb\") pod \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\" (UID: \"cd7592d1-57a9-4de2-89bc-e40adaf6116d\") " Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.797770 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg" 
(OuterVolumeSpecName: "kube-api-access-6htjg") pod "cd7592d1-57a9-4de2-89bc-e40adaf6116d" (UID: "cd7592d1-57a9-4de2-89bc-e40adaf6116d"). InnerVolumeSpecName "kube-api-access-6htjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.862828 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cd7592d1-57a9-4de2-89bc-e40adaf6116d" (UID: "cd7592d1-57a9-4de2-89bc-e40adaf6116d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.885378 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.885668 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.885788 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.885885 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv4xf\" (UniqueName: \"kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.886011 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.886486 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.886527 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6htjg\" (UniqueName: \"kubernetes.io/projected/cd7592d1-57a9-4de2-89bc-e40adaf6116d-kube-api-access-6htjg\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.892648 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config" (OuterVolumeSpecName: "config") pod "cd7592d1-57a9-4de2-89bc-e40adaf6116d" (UID: 
"cd7592d1-57a9-4de2-89bc-e40adaf6116d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.929669 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cd7592d1-57a9-4de2-89bc-e40adaf6116d" (UID: "cd7592d1-57a9-4de2-89bc-e40adaf6116d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.987987 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988108 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988160 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988246 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988451 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv4xf\" (UniqueName: \"kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988924 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.989314 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.988979 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " 
pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.989215 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.989443 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:13 crc kubenswrapper[4912]: I1208 21:40:13.989467 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd7592d1-57a9-4de2-89bc-e40adaf6116d-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.005190 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv4xf\" (UniqueName: \"kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf\") pod \"dnsmasq-dns-698758b865-rhsc4\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") " pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.053166 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.147287 4912 generic.go:334] "Generic (PLEG): container finished" podID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" containerID="2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274" exitCode=0 Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.147380 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.147433 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" event={"ID":"cd7592d1-57a9-4de2-89bc-e40adaf6116d","Type":"ContainerDied","Data":"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274"} Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.147716 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tm2qm" event={"ID":"cd7592d1-57a9-4de2-89bc-e40adaf6116d","Type":"ContainerDied","Data":"8b0fe81f96c6dc7f0d554612a796b2a27017c2591f6d86b9348075a9dcebf2f2"} Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.147756 4912 scope.go:117] "RemoveContainer" containerID="2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.189739 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"] Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.196862 4912 scope.go:117] "RemoveContainer" containerID="fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.200227 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tm2qm"] Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.252636 4912 scope.go:117] "RemoveContainer" containerID="2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274" Dec 08 21:40:14 crc kubenswrapper[4912]: E1208 21:40:14.253891 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274\": container with ID starting with 2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274 not found: ID does not exist" containerID="2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.253930 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274"} err="failed to get container status \"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274\": rpc error: code = NotFound desc = could not find container \"2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274\": container with ID starting with 2822d85f6d9e05e9019b8ce21a134dcc74e18a9e971c98e58ff447629ea26274 not found: ID does not exist" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.253960 4912 scope.go:117] "RemoveContainer" containerID="fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be" Dec 08 21:40:14 crc kubenswrapper[4912]: E1208 21:40:14.254346 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be\": container with ID starting with fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be not found: ID does not exist" containerID="fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.254378 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be"} err="failed to get container status 
\"fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be\": rpc error: code = NotFound desc = could not find container \"fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be\": container with ID starting with fe713a2587d77521c2b20d8ba4f519c74657691a3c1b288f96bb92a45944e8be not found: ID does not exist" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.453672 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd7592d1-57a9-4de2-89bc-e40adaf6116d" path="/var/lib/kubelet/pods/cd7592d1-57a9-4de2-89bc-e40adaf6116d/volumes" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.563811 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"] Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.769104 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.904155 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:40:14 crc kubenswrapper[4912]: E1208 21:40:14.904621 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e452832b-2c91-42f8-be68-d0a6b1ec4b1a" containerName="mariadb-database-create" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.904690 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e452832b-2c91-42f8-be68-d0a6b1ec4b1a" containerName="mariadb-database-create" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.904998 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e452832b-2c91-42f8-be68-d0a6b1ec4b1a" containerName="mariadb-database-create" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.947422 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.951556 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-zjg9f" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.951652 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.951908 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.952026 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.956716 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.964709 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp9vp\" (UniqueName: \"kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp\") pod \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.965167 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts\") pod \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\" (UID: \"e452832b-2c91-42f8-be68-d0a6b1ec4b1a\") " Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.966840 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e452832b-2c91-42f8-be68-d0a6b1ec4b1a" (UID: "e452832b-2c91-42f8-be68-d0a6b1ec4b1a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:14 crc kubenswrapper[4912]: I1208 21:40:14.980823 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp" (OuterVolumeSpecName: "kube-api-access-kp9vp") pod "e452832b-2c91-42f8-be68-d0a6b1ec4b1a" (UID: "e452832b-2c91-42f8-be68-d0a6b1ec4b1a"). InnerVolumeSpecName "kube-api-access-kp9vp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.032243 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067418 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-cache\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067560 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-262vv\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-kube-api-access-262vv\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-lock\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067715 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067737 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067810 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp9vp\" (UniqueName: \"kubernetes.io/projected/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-kube-api-access-kp9vp\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.067852 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e452832b-2c91-42f8-be68-d0a6b1ec4b1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.158560 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b6dd-account-create-update-wvsx7" event={"ID":"31b6f852-ce10-4c0d-8a14-897b647ae732","Type":"ContainerDied","Data":"ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd"} Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.158598 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b6dd-account-create-update-wvsx7" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.158605 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef46da7eec66933eb854339ed990c6ab34e00480a3d630e19fd1c6b021c4b0cd" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.161477 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mwbl8" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.161472 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mwbl8" event={"ID":"e452832b-2c91-42f8-be68-d0a6b1ec4b1a","Type":"ContainerDied","Data":"9e9e22a48b12b9cf1969a12c54b6e946ee7030bd3b664f0e394a174544beb073"} Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.161596 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e9e22a48b12b9cf1969a12c54b6e946ee7030bd3b664f0e394a174544beb073" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.162479 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-rhsc4" event={"ID":"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb","Type":"ContainerStarted","Data":"7faa65e01b614b276983caf5ffce13cef336779798921ed0938f63e14694df74"} Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.168655 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts\") pod \"31b6f852-ce10-4c0d-8a14-897b647ae732\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.168740 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zk6b\" (UniqueName: \"kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b\") pod \"31b6f852-ce10-4c0d-8a14-897b647ae732\" (UID: \"31b6f852-ce10-4c0d-8a14-897b647ae732\") " Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.168990 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-cache\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169109 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-262vv\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-kube-api-access-262vv\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169147 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-lock\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169200 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169226 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.169403 4912 projected.go:288] Couldn't 
get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.169428 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.169482 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. No retries permitted until 2025-12-08 21:40:15.66946285 +0000 UTC m=+1297.532464933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169537 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-cache\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169601 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/928f7959-0e9f-4b2d-bfa2-2d970196f49f-lock\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.169692 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31b6f852-ce10-4c0d-8a14-897b647ae732" (UID: "31b6f852-ce10-4c0d-8a14-897b647ae732"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.173820 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.173852 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8a22b4e116834f6dc2672e09a793776c26ddc03bad8e8277ce955cd961e9c094/globalmount\"" pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.175361 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b" (OuterVolumeSpecName: "kube-api-access-9zk6b") pod "31b6f852-ce10-4c0d-8a14-897b647ae732" (UID: "31b6f852-ce10-4c0d-8a14-897b647ae732"). InnerVolumeSpecName "kube-api-access-9zk6b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.187149 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-262vv\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-kube-api-access-262vv\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.211317 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d82e66a1-ee27-48c4-8898-f9c86bcb8dd6\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.252761 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.270852 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zk6b\" (UniqueName: \"kubernetes.io/projected/31b6f852-ce10-4c0d-8a14-897b647ae732-kube-api-access-9zk6b\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.270899 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b6f852-ce10-4c0d-8a14-897b647ae732-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:15 crc kubenswrapper[4912]: I1208 21:40:15.677541 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.677772 4912 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.677786 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:15 crc kubenswrapper[4912]: E1208 21:40:15.677828 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. No retries permitted until 2025-12-08 21:40:16.67781542 +0000 UTC m=+1298.540817503 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.172412 4912 generic.go:334] "Generic (PLEG): container finished" podID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerID="8d1ab3da369d33d9f72100655124f91c9c550d6361224eaf60cbdc9fb7f64537" exitCode=0 Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.172535 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-rhsc4" event={"ID":"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb","Type":"ContainerDied","Data":"8d1ab3da369d33d9f72100655124f91c9c550d6361224eaf60cbdc9fb7f64537"} Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.698918 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:16 crc kubenswrapper[4912]: E1208 21:40:16.699106 4912 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:16 crc kubenswrapper[4912]: E1208 21:40:16.699286 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:16 crc kubenswrapper[4912]: E1208 21:40:16.699347 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. No retries permitted until 2025-12-08 21:40:18.699332062 +0000 UTC m=+1300.562334145 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.887938 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-2kbmb"] Dec 08 21:40:16 crc kubenswrapper[4912]: E1208 21:40:16.888686 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31b6f852-ce10-4c0d-8a14-897b647ae732" containerName="mariadb-account-create-update" Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.888708 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="31b6f852-ce10-4c0d-8a14-897b647ae732" containerName="mariadb-account-create-update" Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.888950 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="31b6f852-ce10-4c0d-8a14-897b647ae732" containerName="mariadb-account-create-update" Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.889910 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.898732 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2kbmb"] Dec 08 21:40:16 crc kubenswrapper[4912]: I1208 21:40:16.999206 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-83db-account-create-update-bw9rr"] Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.000641 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.003107 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.005401 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.005647 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b88n\" (UniqueName: \"kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.005654 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-83db-account-create-update-bw9rr"] Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.107852 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.108328 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzdzb\" (UniqueName: \"kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.108474 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.108599 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b88n\" (UniqueName: \"kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.109156 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.126138 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b88n\" (UniqueName: \"kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n\") pod \"glance-db-create-2kbmb\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.181725 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-rhsc4" event={"ID":"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb","Type":"ContainerStarted","Data":"391dec488e0a2457e4b3f944424b62a2f7974216b2b1d4af655770d78cb3c958"} Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.182801 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.205025 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-rhsc4" podStartSLOduration=4.205002448 podStartE2EDuration="4.205002448s" podCreationTimestamp="2025-12-08 21:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:17.2024759 +0000 UTC m=+1299.065477983" watchObservedRunningTime="2025-12-08 21:40:17.205002448 +0000 UTC m=+1299.068004521" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.207635 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.210618 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.210893 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzdzb\" (UniqueName: \"kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.212028 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.231735 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzdzb\" (UniqueName: \"kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb\") pod \"glance-83db-account-create-update-bw9rr\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.319402 
4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.748892 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2kbmb"] Dec 08 21:40:17 crc kubenswrapper[4912]: I1208 21:40:17.924816 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-83db-account-create-update-bw9rr"] Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.031362 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.191270 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2kbmb" event={"ID":"b00be803-22da-4dfd-b211-cd9b1a44bf80","Type":"ContainerStarted","Data":"754ac7e3f537f3a47b9805abb1966208987b1e1aa731333ffd8ee1b4f14ce25f"} Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.191320 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2kbmb" event={"ID":"b00be803-22da-4dfd-b211-cd9b1a44bf80","Type":"ContainerStarted","Data":"f71130ea0c1a410e6c39e7f368f7a524c937629edcfb8ebc8d058b61ab087362"} Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.195953 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-83db-account-create-update-bw9rr" event={"ID":"69df27a2-fe75-49cf-88ef-4fba6c4884f9","Type":"ContainerStarted","Data":"8545ec1d97a924e6f106e3d01963c606dae5702448a6ab3b10f2a39f2182d613"} Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.195995 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-83db-account-create-update-bw9rr" event={"ID":"69df27a2-fe75-49cf-88ef-4fba6c4884f9","Type":"ContainerStarted","Data":"ec9579bc959392d20647e66e59ef7e92130afb9d252868a4b2275b241fb1b21a"} Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.207877 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-2kbmb" podStartSLOduration=2.207860343 podStartE2EDuration="2.207860343s" podCreationTimestamp="2025-12-08 21:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:18.206866546 +0000 UTC m=+1300.069868639" watchObservedRunningTime="2025-12-08 21:40:18.207860343 +0000 UTC m=+1300.070862426" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.742929 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:18 crc kubenswrapper[4912]: E1208 21:40:18.743151 4912 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:18 crc kubenswrapper[4912]: E1208 21:40:18.743323 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:18 crc kubenswrapper[4912]: E1208 21:40:18.743381 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. 
No retries permitted until 2025-12-08 21:40:22.743365331 +0000 UTC m=+1304.606367414 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.814429 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-83db-account-create-update-bw9rr" podStartSLOduration=2.814407902 podStartE2EDuration="2.814407902s" podCreationTimestamp="2025-12-08 21:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:18.224563277 +0000 UTC m=+1300.087565360" watchObservedRunningTime="2025-12-08 21:40:18.814407902 +0000 UTC m=+1300.677409995" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.820275 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-twtkt"] Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.821619 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.824362 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.824422 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.824555 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.837969 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-twtkt"] Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.945789 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.945831 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x27w\" (UniqueName: \"kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.945866 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.946011 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " 
pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.946084 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.946185 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:18 crc kubenswrapper[4912]: I1208 21:40:18.946224 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047550 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047621 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047644 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047712 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047748 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.047778 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: 
I1208 21:40:19.047798 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x27w\" (UniqueName: \"kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.048486 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.048738 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.049423 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.055212 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.055386 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.055618 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.070015 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x27w\" (UniqueName: \"kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w\") pod \"swift-ring-rebalance-twtkt\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.139132 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.206903 4912 generic.go:334] "Generic (PLEG): container finished" podID="69df27a2-fe75-49cf-88ef-4fba6c4884f9" containerID="8545ec1d97a924e6f106e3d01963c606dae5702448a6ab3b10f2a39f2182d613" exitCode=0 Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.207227 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-83db-account-create-update-bw9rr" event={"ID":"69df27a2-fe75-49cf-88ef-4fba6c4884f9","Type":"ContainerDied","Data":"8545ec1d97a924e6f106e3d01963c606dae5702448a6ab3b10f2a39f2182d613"} Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.210964 4912 generic.go:334] "Generic (PLEG): container finished" podID="b00be803-22da-4dfd-b211-cd9b1a44bf80" containerID="754ac7e3f537f3a47b9805abb1966208987b1e1aa731333ffd8ee1b4f14ce25f" exitCode=0 Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.214899 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2kbmb" event={"ID":"b00be803-22da-4dfd-b211-cd9b1a44bf80","Type":"ContainerDied","Data":"754ac7e3f537f3a47b9805abb1966208987b1e1aa731333ffd8ee1b4f14ce25f"} Dec 08 21:40:19 crc kubenswrapper[4912]: I1208 21:40:19.636728 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-twtkt"] Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.223796 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-twtkt" event={"ID":"d2fd975a-48c8-42a1-a81d-869c32e97dc8","Type":"ContainerStarted","Data":"8075b8a40a4b3a72d53dd331c2ac193a3a527c8c94a198a7404b76f237a114ab"} Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.728222 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.834899 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzdzb\" (UniqueName: \"kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb\") pod \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.835225 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts\") pod \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\" (UID: \"69df27a2-fe75-49cf-88ef-4fba6c4884f9\") " Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.836233 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69df27a2-fe75-49cf-88ef-4fba6c4884f9" (UID: "69df27a2-fe75-49cf-88ef-4fba6c4884f9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.843044 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb" (OuterVolumeSpecName: "kube-api-access-lzdzb") pod "69df27a2-fe75-49cf-88ef-4fba6c4884f9" (UID: "69df27a2-fe75-49cf-88ef-4fba6c4884f9"). InnerVolumeSpecName "kube-api-access-lzdzb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.896613 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.938088 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts\") pod \"b00be803-22da-4dfd-b211-cd9b1a44bf80\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.938177 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b88n\" (UniqueName: \"kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n\") pod \"b00be803-22da-4dfd-b211-cd9b1a44bf80\" (UID: \"b00be803-22da-4dfd-b211-cd9b1a44bf80\") " Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.938626 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69df27a2-fe75-49cf-88ef-4fba6c4884f9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.938644 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzdzb\" (UniqueName: \"kubernetes.io/projected/69df27a2-fe75-49cf-88ef-4fba6c4884f9-kube-api-access-lzdzb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.938741 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b00be803-22da-4dfd-b211-cd9b1a44bf80" (UID: "b00be803-22da-4dfd-b211-cd9b1a44bf80"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:20 crc kubenswrapper[4912]: I1208 21:40:20.965406 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n" (OuterVolumeSpecName: "kube-api-access-2b88n") pod "b00be803-22da-4dfd-b211-cd9b1a44bf80" (UID: "b00be803-22da-4dfd-b211-cd9b1a44bf80"). InnerVolumeSpecName "kube-api-access-2b88n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.040705 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00be803-22da-4dfd-b211-cd9b1a44bf80-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.040742 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b88n\" (UniqueName: \"kubernetes.io/projected/b00be803-22da-4dfd-b211-cd9b1a44bf80-kube-api-access-2b88n\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.185661 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-tf9gs"] Dec 08 21:40:21 crc kubenswrapper[4912]: E1208 21:40:21.186125 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69df27a2-fe75-49cf-88ef-4fba6c4884f9" containerName="mariadb-account-create-update" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.186143 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="69df27a2-fe75-49cf-88ef-4fba6c4884f9" containerName="mariadb-account-create-update" Dec 08 21:40:21 crc kubenswrapper[4912]: E1208 21:40:21.186166 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b00be803-22da-4dfd-b211-cd9b1a44bf80" containerName="mariadb-database-create" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.186173 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b00be803-22da-4dfd-b211-cd9b1a44bf80" containerName="mariadb-database-create" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.186324 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="69df27a2-fe75-49cf-88ef-4fba6c4884f9" containerName="mariadb-account-create-update" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.186338 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="b00be803-22da-4dfd-b211-cd9b1a44bf80" containerName="mariadb-database-create" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.186939 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.193408 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tf9gs"] Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.239386 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-83db-account-create-update-bw9rr" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.239402 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-83db-account-create-update-bw9rr" event={"ID":"69df27a2-fe75-49cf-88ef-4fba6c4884f9","Type":"ContainerDied","Data":"ec9579bc959392d20647e66e59ef7e92130afb9d252868a4b2275b241fb1b21a"} Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.239504 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec9579bc959392d20647e66e59ef7e92130afb9d252868a4b2275b241fb1b21a" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.242859 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2kbmb" event={"ID":"b00be803-22da-4dfd-b211-cd9b1a44bf80","Type":"ContainerDied","Data":"f71130ea0c1a410e6c39e7f368f7a524c937629edcfb8ebc8d058b61ab087362"} Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.242905 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f71130ea0c1a410e6c39e7f368f7a524c937629edcfb8ebc8d058b61ab087362" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.242971 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2kbmb" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.244113 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmsgp\" (UniqueName: \"kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.244196 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.304517 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-42dd-account-create-update-s4hkv"] Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.305935 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.308390 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.317004 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-42dd-account-create-update-s4hkv"] Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.351641 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmsgp\" (UniqueName: \"kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.351763 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts\") pod \"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.351853 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.351987 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb876\" (UniqueName: \"kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876\") pod \"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.353576 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.369827 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmsgp\" (UniqueName: \"kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp\") pod \"keystone-db-create-tf9gs\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") " pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.453657 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts\") pod \"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.453985 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb876\" (UniqueName: \"kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876\") pod 
\"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.454621 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts\") pod \"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.472097 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb876\" (UniqueName: \"kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876\") pod \"keystone-42dd-account-create-update-s4hkv\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") " pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.509684 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tf9gs" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.623016 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:21 crc kubenswrapper[4912]: I1208 21:40:21.987600 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tf9gs"] Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.148700 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-nftjn"] Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.152786 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.157691 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nftjn"] Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.161671 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.163549 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-p7sjg" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.169203 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.169327 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.169348 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6zhm\" (UniqueName: \"kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.169370 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.170356 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-42dd-account-create-update-s4hkv"] Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.254373 4912 generic.go:334] "Generic (PLEG): container finished" podID="f725cb12-94d8-42af-9930-d1d8a17ae9a7" containerID="b7a7b8152abd3fe3d052ba480d146364636edc6a817cd5796935fc522140ae51" exitCode=0 Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.254511 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f725cb12-94d8-42af-9930-d1d8a17ae9a7","Type":"ContainerDied","Data":"b7a7b8152abd3fe3d052ba480d146364636edc6a817cd5796935fc522140ae51"} Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.258098 4912 generic.go:334] "Generic (PLEG): container finished" podID="621df657-49db-4768-8ad5-6676531990d4" containerID="52889e2d0bfeb7bfdb67e9cad760c502263cf08154372116b91e024fe55874ef" exitCode=0 Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.258140 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"621df657-49db-4768-8ad5-6676531990d4","Type":"ContainerDied","Data":"52889e2d0bfeb7bfdb67e9cad760c502263cf08154372116b91e024fe55874ef"} Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.271087 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.272641 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6zhm\" (UniqueName: \"kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.272683 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.272781 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.279483 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.287414 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.292085 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6zhm\" (UniqueName: \"kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.296376 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data\") pod \"glance-db-sync-nftjn\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.484106 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:22 crc kubenswrapper[4912]: I1208 21:40:22.788410 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:22 crc kubenswrapper[4912]: E1208 21:40:22.788769 4912 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:22 crc kubenswrapper[4912]: E1208 21:40:22.788789 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:22 crc kubenswrapper[4912]: E1208 21:40:22.788840 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. No retries permitted until 2025-12-08 21:40:30.788822593 +0000 UTC m=+1312.651824666 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:24 crc kubenswrapper[4912]: I1208 21:40:24.056227 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-rhsc4" Dec 08 21:40:24 crc kubenswrapper[4912]: I1208 21:40:24.148256 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"] Dec 08 21:40:24 crc kubenswrapper[4912]: I1208 21:40:24.148495 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="dnsmasq-dns" containerID="cri-o://07f4ee88a152e907c7fbbe874a321d6f4dfd31f8776f9c24f043f08f9713ab2b" gracePeriod=10 Dec 08 21:40:25 crc kubenswrapper[4912]: I1208 21:40:25.295697 4912 generic.go:334] "Generic (PLEG): container finished" podID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerID="07f4ee88a152e907c7fbbe874a321d6f4dfd31f8776f9c24f043f08f9713ab2b" exitCode=0 Dec 08 21:40:25 crc kubenswrapper[4912]: I1208 21:40:25.295861 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" event={"ID":"ebd46e0e-6292-457b-a817-a9686944fc2e","Type":"ContainerDied","Data":"07f4ee88a152e907c7fbbe874a321d6f4dfd31f8776f9c24f043f08f9713ab2b"} Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.322540 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tf9gs" event={"ID":"de10e2f6-f34e-4da7-bab3-a302a691ca52","Type":"ContainerStarted","Data":"e22d73232f5e7360751f013cbccb53ef2086da9a4fc9a5a189ce57634b70d0bc"} Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.324615 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-42dd-account-create-update-s4hkv" event={"ID":"3dd8c54a-66bf-458c-9838-83214f8fafaa","Type":"ContainerStarted","Data":"283fa07d1c52e520a24fc0c0511b2765b9cc0a0f8c0b299c893606cb46206931"} Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.695862 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.753753 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-vwsgt" podUID="0456360f-7543-4af2-ad73-07d0332d3ce2" containerName="ovn-controller" probeResult="failure" output=< Dec 08 21:40:26 crc kubenswrapper[4912]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 08 21:40:26 crc kubenswrapper[4912]: > Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.762532 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config\") pod \"ebd46e0e-6292-457b-a817-a9686944fc2e\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.763359 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc\") pod \"ebd46e0e-6292-457b-a817-a9686944fc2e\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.763485 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cd2l\" (UniqueName: \"kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l\") pod \"ebd46e0e-6292-457b-a817-a9686944fc2e\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.763745 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb\") pod \"ebd46e0e-6292-457b-a817-a9686944fc2e\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.763844 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb\") pod \"ebd46e0e-6292-457b-a817-a9686944fc2e\" (UID: \"ebd46e0e-6292-457b-a817-a9686944fc2e\") " Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.770586 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l" (OuterVolumeSpecName: "kube-api-access-5cd2l") pod "ebd46e0e-6292-457b-a817-a9686944fc2e" (UID: "ebd46e0e-6292-457b-a817-a9686944fc2e"). InnerVolumeSpecName "kube-api-access-5cd2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.823535 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config" (OuterVolumeSpecName: "config") pod "ebd46e0e-6292-457b-a817-a9686944fc2e" (UID: "ebd46e0e-6292-457b-a817-a9686944fc2e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.828423 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ebd46e0e-6292-457b-a817-a9686944fc2e" (UID: "ebd46e0e-6292-457b-a817-a9686944fc2e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.841705 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ebd46e0e-6292-457b-a817-a9686944fc2e" (UID: "ebd46e0e-6292-457b-a817-a9686944fc2e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.854679 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ebd46e0e-6292-457b-a817-a9686944fc2e" (UID: "ebd46e0e-6292-457b-a817-a9686944fc2e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.865987 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.866016 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.866028 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.866061 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cd2l\" (UniqueName: \"kubernetes.io/projected/ebd46e0e-6292-457b-a817-a9686944fc2e-kube-api-access-5cd2l\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:26 crc kubenswrapper[4912]: I1208 21:40:26.866072 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ebd46e0e-6292-457b-a817-a9686944fc2e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.024721 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nftjn"] Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.075636 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dx49f" Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.085257 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dx49f" Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.314644 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-vwsgt-config-gv7g2"] Dec 08 21:40:27 crc kubenswrapper[4912]: E1208 21:40:27.315375 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="init" Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.315399 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="init" Dec 08 21:40:27 crc kubenswrapper[4912]: E1208 21:40:27.315430 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="dnsmasq-dns" Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.315441 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="dnsmasq-dns"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.315663 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" containerName="dnsmasq-dns"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.316403 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.318594 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.326683 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vwsgt-config-gv7g2"]
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.342520 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f725cb12-94d8-42af-9930-d1d8a17ae9a7","Type":"ContainerStarted","Data":"7cc43359278258bb97d883a5d52995e8e37e86fb164bba679740ff598afaf8c3"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.343366 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.360868 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mb992" event={"ID":"ebd46e0e-6292-457b-a817-a9686944fc2e","Type":"ContainerDied","Data":"190aaf0f1bedcc0be30afd5dcffae2f01a233c6b4880d5d1e56fb41e274e68c6"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.360920 4912 scope.go:117] "RemoveContainer" containerID="07f4ee88a152e907c7fbbe874a321d6f4dfd31f8776f9c24f043f08f9713ab2b"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.361113 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mb992"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.366303 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-twtkt" event={"ID":"d2fd975a-48c8-42a1-a81d-869c32e97dc8","Type":"ContainerStarted","Data":"16aeaa40aaeb3ef239ab50d5695f3b3b1edf6cf722c2df7ff754c159311c4ab1"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.374952 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"621df657-49db-4768-8ad5-6676531990d4","Type":"ContainerStarted","Data":"22d54f17d64e01198ba4cc0fd93aadf9b5136596b9ee93b8e532db16452b1294"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.375418 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.377391 4912 generic.go:334] "Generic (PLEG): container finished" podID="3dd8c54a-66bf-458c-9838-83214f8fafaa" containerID="4669b5ab31cf110fa39e27bab09ab377d03338d59c9311b0d37573fe4eeb2a58" exitCode=0
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.377450 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-42dd-account-create-update-s4hkv" event={"ID":"3dd8c54a-66bf-458c-9838-83214f8fafaa","Type":"ContainerDied","Data":"4669b5ab31cf110fa39e27bab09ab377d03338d59c9311b0d37573fe4eeb2a58"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.379206 4912 generic.go:334] "Generic (PLEG): container finished" podID="de10e2f6-f34e-4da7-bab3-a302a691ca52" containerID="5ef1dc44f0f94d909236d69110da9e533880f4dec69964281cb50574b356560d" exitCode=0
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.379281 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tf9gs" event={"ID":"de10e2f6-f34e-4da7-bab3-a302a691ca52","Type":"ContainerDied","Data":"5ef1dc44f0f94d909236d69110da9e533880f4dec69964281cb50574b356560d"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.382635 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nftjn" event={"ID":"3d4fe027-c837-4bb3-b658-30c00d41ce24","Type":"ContainerStarted","Data":"ff83ac72d3b3b3a54dcf9bdbb270e8a4fe497ff1e3278dfbd388a11a6ca67441"}
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.388743 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=41.846950391 podStartE2EDuration="1m10.388702517s" podCreationTimestamp="2025-12-08 21:39:17 +0000 UTC" firstStartedPulling="2025-12-08 21:39:19.453456677 +0000 UTC m=+1241.316458760" lastFinishedPulling="2025-12-08 21:39:47.995208803 +0000 UTC m=+1269.858210886" observedRunningTime="2025-12-08 21:40:27.383011302 +0000 UTC m=+1309.246013385" watchObservedRunningTime="2025-12-08 21:40:27.388702517 +0000 UTC m=+1309.251704600"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.416709 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=42.807340784 podStartE2EDuration="1m11.416695818s" podCreationTimestamp="2025-12-08 21:39:16 +0000 UTC" firstStartedPulling="2025-12-08 21:39:19.450322981 +0000 UTC m=+1241.313325064" lastFinishedPulling="2025-12-08 21:39:48.059678015 +0000 UTC m=+1269.922680098" observedRunningTime="2025-12-08 21:40:27.412659698 +0000 UTC m=+1309.275661781" watchObservedRunningTime="2025-12-08 21:40:27.416695818 +0000 UTC m=+1309.279697901"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.437869 4912 scope.go:117] "RemoveContainer" containerID="88150e1d4ae584b33c4d6ee57c2ffefed7006419ad3f5b5101261b39e135a0d2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.448066 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-twtkt" podStartSLOduration=2.659125104 podStartE2EDuration="9.44804763s" podCreationTimestamp="2025-12-08 21:40:18 +0000 UTC" firstStartedPulling="2025-12-08 21:40:19.642352712 +0000 UTC m=+1301.505354795" lastFinishedPulling="2025-12-08 21:40:26.431275238 +0000 UTC m=+1308.294277321" observedRunningTime="2025-12-08 21:40:27.446586491 +0000 UTC m=+1309.309588574" watchObservedRunningTime="2025-12-08 21:40:27.44804763 +0000 UTC m=+1309.311049713"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.474690 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.474791 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfzdw\" (UniqueName: \"kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.474856 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.475072 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.475118 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.475180 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.512772 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"]
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.521150 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mb992"]
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578277 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfzdw\" (UniqueName: \"kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578357 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578522 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578555 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578634 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.578710 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.579431 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.579439 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.579705 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.579722 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.581593 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.623701 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfzdw\" (UniqueName: \"kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw\") pod \"ovn-controller-vwsgt-config-gv7g2\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:27 crc kubenswrapper[4912]: I1208 21:40:27.739851 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt-config-gv7g2"
Dec 08 21:40:28 crc kubenswrapper[4912]: I1208 21:40:28.186875 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vwsgt-config-gv7g2"]
Dec 08 21:40:28 crc kubenswrapper[4912]: W1208 21:40:28.202244 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2201ce89_3c2f_4297_b2f0_3c352ba73088.slice/crio-c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660 WatchSource:0}: Error finding container c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660: Status 404 returned error can't find the container with id c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660
Dec 08 21:40:28 crc kubenswrapper[4912]: I1208 21:40:28.399364 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vwsgt-config-gv7g2" event={"ID":"2201ce89-3c2f-4297-b2f0-3c352ba73088","Type":"ContainerStarted","Data":"c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660"}
Dec 08 21:40:28 crc kubenswrapper[4912]: I1208 21:40:28.449473 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebd46e0e-6292-457b-a817-a9686944fc2e" path="/var/lib/kubelet/pods/ebd46e0e-6292-457b-a817-a9686944fc2e/volumes"
Dec 08 21:40:28 crc kubenswrapper[4912]: I1208 21:40:28.896080 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tf9gs"
Dec 08 21:40:28 crc kubenswrapper[4912]: I1208 21:40:28.903461 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-42dd-account-create-update-s4hkv"
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.001891 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts\") pod \"de10e2f6-f34e-4da7-bab3-a302a691ca52\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") "
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.001992 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmsgp\" (UniqueName: \"kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp\") pod \"de10e2f6-f34e-4da7-bab3-a302a691ca52\" (UID: \"de10e2f6-f34e-4da7-bab3-a302a691ca52\") "
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.002120 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts\") pod \"3dd8c54a-66bf-458c-9838-83214f8fafaa\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") "
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.002162 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb876\" (UniqueName: \"kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876\") pod \"3dd8c54a-66bf-458c-9838-83214f8fafaa\" (UID: \"3dd8c54a-66bf-458c-9838-83214f8fafaa\") "
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.002396 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de10e2f6-f34e-4da7-bab3-a302a691ca52" (UID: "de10e2f6-f34e-4da7-bab3-a302a691ca52"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.002615 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de10e2f6-f34e-4da7-bab3-a302a691ca52-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.002725 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3dd8c54a-66bf-458c-9838-83214f8fafaa" (UID: "3dd8c54a-66bf-458c-9838-83214f8fafaa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.008744 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp" (OuterVolumeSpecName: "kube-api-access-nmsgp") pod "de10e2f6-f34e-4da7-bab3-a302a691ca52" (UID: "de10e2f6-f34e-4da7-bab3-a302a691ca52"). InnerVolumeSpecName "kube-api-access-nmsgp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.010551 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876" (OuterVolumeSpecName: "kube-api-access-lb876") pod "3dd8c54a-66bf-458c-9838-83214f8fafaa" (UID: "3dd8c54a-66bf-458c-9838-83214f8fafaa"). InnerVolumeSpecName "kube-api-access-lb876". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.104619 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmsgp\" (UniqueName: \"kubernetes.io/projected/de10e2f6-f34e-4da7-bab3-a302a691ca52-kube-api-access-nmsgp\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.104657 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd8c54a-66bf-458c-9838-83214f8fafaa-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.104666 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb876\" (UniqueName: \"kubernetes.io/projected/3dd8c54a-66bf-458c-9838-83214f8fafaa-kube-api-access-lb876\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.412151 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tf9gs" event={"ID":"de10e2f6-f34e-4da7-bab3-a302a691ca52","Type":"ContainerDied","Data":"e22d73232f5e7360751f013cbccb53ef2086da9a4fc9a5a189ce57634b70d0bc"}
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.414217 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e22d73232f5e7360751f013cbccb53ef2086da9a4fc9a5a189ce57634b70d0bc"
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.412409 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tf9gs"
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.415010 4912 generic.go:334] "Generic (PLEG): container finished" podID="2201ce89-3c2f-4297-b2f0-3c352ba73088" containerID="cd85b21d8c49c0566a10c3fef4eddd510c5400559d97b73fbc4a60eea732a792" exitCode=0
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.415285 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vwsgt-config-gv7g2" event={"ID":"2201ce89-3c2f-4297-b2f0-3c352ba73088","Type":"ContainerDied","Data":"cd85b21d8c49c0566a10c3fef4eddd510c5400559d97b73fbc4a60eea732a792"}
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.417070 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-42dd-account-create-update-s4hkv" event={"ID":"3dd8c54a-66bf-458c-9838-83214f8fafaa","Type":"ContainerDied","Data":"283fa07d1c52e520a24fc0c0511b2765b9cc0a0f8c0b299c893606cb46206931"}
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.417134 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="283fa07d1c52e520a24fc0c0511b2765b9cc0a0f8c0b299c893606cb46206931"
Dec 08 21:40:29 crc kubenswrapper[4912]: I1208 21:40:29.417282 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-42dd-account-create-update-s4hkv"
Need to start a new one" pod="openstack/keystone-42dd-account-create-update-s4hkv" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.834361 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:30 crc kubenswrapper[4912]: E1208 21:40:30.834670 4912 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 08 21:40:30 crc kubenswrapper[4912]: E1208 21:40:30.834904 4912 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 08 21:40:30 crc kubenswrapper[4912]: E1208 21:40:30.835002 4912 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift podName:928f7959-0e9f-4b2d-bfa2-2d970196f49f nodeName:}" failed. No retries permitted until 2025-12-08 21:40:46.834973049 +0000 UTC m=+1328.697975132 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift") pod "swift-storage-0" (UID: "928f7959-0e9f-4b2d-bfa2-2d970196f49f") : configmap "swift-ring-files" not found Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.890710 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt-config-gv7g2" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936242 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfzdw\" (UniqueName: \"kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936310 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936489 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936513 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run" (OuterVolumeSpecName: "var-run") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936535 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936579 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936696 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936726 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts\") pod \"2201ce89-3c2f-4297-b2f0-3c352ba73088\" (UID: \"2201ce89-3c2f-4297-b2f0-3c352ba73088\") " Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.936810 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.937136 4912 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.937150 4912 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.937159 4912 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2201ce89-3c2f-4297-b2f0-3c352ba73088-var-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.937459 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.937631 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts" (OuterVolumeSpecName: "scripts") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:30 crc kubenswrapper[4912]: I1208 21:40:30.946142 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw" (OuterVolumeSpecName: "kube-api-access-bfzdw") pod "2201ce89-3c2f-4297-b2f0-3c352ba73088" (UID: "2201ce89-3c2f-4297-b2f0-3c352ba73088"). 
InnerVolumeSpecName "kube-api-access-bfzdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.038454 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfzdw\" (UniqueName: \"kubernetes.io/projected/2201ce89-3c2f-4297-b2f0-3c352ba73088-kube-api-access-bfzdw\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.038481 4912 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.038492 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2201ce89-3c2f-4297-b2f0-3c352ba73088-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.434433 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vwsgt-config-gv7g2" event={"ID":"2201ce89-3c2f-4297-b2f0-3c352ba73088","Type":"ContainerDied","Data":"c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660"} Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.434470 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c628e47787927086cb86d75e395564ac85c200582b924813a05a7fdff9477660" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.434537 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vwsgt-config-gv7g2" Dec 08 21:40:31 crc kubenswrapper[4912]: I1208 21:40:31.782722 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-vwsgt" Dec 08 21:40:32 crc kubenswrapper[4912]: I1208 21:40:32.054234 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-vwsgt-config-gv7g2"] Dec 08 21:40:32 crc kubenswrapper[4912]: I1208 21:40:32.067984 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-vwsgt-config-gv7g2"] Dec 08 21:40:32 crc kubenswrapper[4912]: I1208 21:40:32.445735 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2201ce89-3c2f-4297-b2f0-3c352ba73088" path="/var/lib/kubelet/pods/2201ce89-3c2f-4297-b2f0-3c352ba73088/volumes" Dec 08 21:40:37 crc kubenswrapper[4912]: I1208 21:40:37.489973 4912 generic.go:334] "Generic (PLEG): container finished" podID="d2fd975a-48c8-42a1-a81d-869c32e97dc8" containerID="16aeaa40aaeb3ef239ab50d5695f3b3b1edf6cf722c2df7ff754c159311c4ab1" exitCode=0 Dec 08 21:40:37 crc kubenswrapper[4912]: I1208 21:40:37.490059 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-twtkt" event={"ID":"d2fd975a-48c8-42a1-a81d-869c32e97dc8","Type":"ContainerDied","Data":"16aeaa40aaeb3ef239ab50d5695f3b3b1edf6cf722c2df7ff754c159311c4ab1"} Dec 08 21:40:38 crc kubenswrapper[4912]: I1208 21:40:38.617298 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 08 21:40:38 crc kubenswrapper[4912]: I1208 21:40:38.695945 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.026477 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mr8rp"] Dec 08 21:40:39 crc kubenswrapper[4912]: E1208 21:40:39.027013 4912 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de10e2f6-f34e-4da7-bab3-a302a691ca52" containerName="mariadb-database-create" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027028 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="de10e2f6-f34e-4da7-bab3-a302a691ca52" containerName="mariadb-database-create" Dec 08 21:40:39 crc kubenswrapper[4912]: E1208 21:40:39.027062 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd8c54a-66bf-458c-9838-83214f8fafaa" containerName="mariadb-account-create-update" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027070 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd8c54a-66bf-458c-9838-83214f8fafaa" containerName="mariadb-account-create-update" Dec 08 21:40:39 crc kubenswrapper[4912]: E1208 21:40:39.027091 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2201ce89-3c2f-4297-b2f0-3c352ba73088" containerName="ovn-config" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027100 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2201ce89-3c2f-4297-b2f0-3c352ba73088" containerName="ovn-config" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027344 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd8c54a-66bf-458c-9838-83214f8fafaa" containerName="mariadb-account-create-update" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027383 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="2201ce89-3c2f-4297-b2f0-3c352ba73088" containerName="ovn-config" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.027405 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="de10e2f6-f34e-4da7-bab3-a302a691ca52" containerName="mariadb-database-create" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.028311 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.045297 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-fbd8-account-create-update-8cvpw"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.046628 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.051305 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.053730 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mr8rp"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.092729 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fbd8-account-create-update-8cvpw"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.143980 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58frt\" (UniqueName: \"kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.144341 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.144411 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.144484 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqhj2\" (UniqueName: \"kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.248492 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.248566 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqhj2\" (UniqueName: \"kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.248700 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58frt\" (UniqueName: \"kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 
21:40:39.248764 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.249610 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.250272 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.288497 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqhj2\" (UniqueName: \"kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2\") pod \"cinder-db-create-mr8rp\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.291206 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58frt\" (UniqueName: \"kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt\") pod \"cinder-fbd8-account-create-update-8cvpw\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.306270 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-wlxkk"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.307370 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.319143 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wlxkk"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.335220 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-ed29-account-create-update-bgmtf"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.336412 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.340133 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.399722 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.400380 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.401503 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb52v\" (UniqueName: \"kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.401641 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hsd\" (UniqueName: \"kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd\") pod \"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.401675 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts\") pod \"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.401797 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.409587 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-ed29-account-create-update-bgmtf"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.421633 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-vs7fn"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.423104 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.425738 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-c727c" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.425826 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.426593 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.428709 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.438602 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vs7fn"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.503841 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.503903 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.503962 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkvbm\" (UniqueName: \"kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.504012 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb52v\" (UniqueName: \"kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.504068 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.504139 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts\") pod \"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.504164 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hsd\" (UniqueName: \"kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd\") pod 
\"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.511636 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-vd8lj"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.512539 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts\") pod \"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.512739 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.512847 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.530782 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hsd\" (UniqueName: \"kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd\") pod \"barbican-ed29-account-create-update-bgmtf\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.540615 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vd8lj"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.552147 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb52v\" (UniqueName: \"kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v\") pod \"barbican-db-create-wlxkk\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.560675 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-957f-account-create-update-9hlk9"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.561891 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.564010 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.573345 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-957f-account-create-update-9hlk9"] Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.604991 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts\") pod \"neutron-db-create-vd8lj\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605376 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkkgd\" (UniqueName: \"kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605545 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605687 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxdj5\" (UniqueName: \"kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5\") pod \"neutron-db-create-vd8lj\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605789 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkvbm\" (UniqueName: \"kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605898 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.605997 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.609884 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle\") pod \"keystone-db-sync-vs7fn\" (UID: 
\"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.610867 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.628253 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkvbm\" (UniqueName: \"kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm\") pod \"keystone-db-sync-vs7fn\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.706700 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkkgd\" (UniqueName: \"kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.706831 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxdj5\" (UniqueName: \"kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5\") pod \"neutron-db-create-vd8lj\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.706881 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.706946 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts\") pod \"neutron-db-create-vd8lj\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.707709 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts\") pod \"neutron-db-create-vd8lj\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.707968 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.724322 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxdj5\" (UniqueName: \"kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5\") pod \"neutron-db-create-vd8lj\" (UID: 
\"74214961-90de-45d4-8b70-a53db54e6a8c\") " pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.725624 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkkgd\" (UniqueName: \"kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd\") pod \"neutron-957f-account-create-update-9hlk9\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.741121 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.755876 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.764062 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.829966 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:39 crc kubenswrapper[4912]: I1208 21:40:39.951845 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.000469 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.152964 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.153025 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.153102 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.153967 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.153996 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.154072 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.154195 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5x27w\" (UniqueName: \"kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w\") pod \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\" (UID: \"d2fd975a-48c8-42a1-a81d-869c32e97dc8\") " Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.155012 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.162758 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.164803 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.164952 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w" (OuterVolumeSpecName: "kube-api-access-5x27w") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "kube-api-access-5x27w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.200574 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.220081 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.233697 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts" (OuterVolumeSpecName: "scripts") pod "d2fd975a-48c8-42a1-a81d-869c32e97dc8" (UID: "d2fd975a-48c8-42a1-a81d-869c32e97dc8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256179 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5x27w\" (UniqueName: \"kubernetes.io/projected/d2fd975a-48c8-42a1-a81d-869c32e97dc8-kube-api-access-5x27w\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256221 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256233 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256241 4912 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256249 4912 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d2fd975a-48c8-42a1-a81d-869c32e97dc8-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256259 4912 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d2fd975a-48c8-42a1-a81d-869c32e97dc8-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.256270 4912 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d2fd975a-48c8-42a1-a81d-869c32e97dc8-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.468661 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mr8rp"] Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.546523 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mr8rp" event={"ID":"b52032bd-e87f-4e3f-9502-847a57d802e4","Type":"ContainerStarted","Data":"21c9586434672a9bae31d9c8ec3663a69e0e47d148c6f8b3b159fa17fafa835e"} Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.549632 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-twtkt" event={"ID":"d2fd975a-48c8-42a1-a81d-869c32e97dc8","Type":"ContainerDied","Data":"8075b8a40a4b3a72d53dd331c2ac193a3a527c8c94a198a7404b76f237a114ab"} Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.549684 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8075b8a40a4b3a72d53dd331c2ac193a3a527c8c94a198a7404b76f237a114ab" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.549764 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-twtkt" Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.703979 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fbd8-account-create-update-8cvpw"] Dec 08 21:40:42 crc kubenswrapper[4912]: W1208 21:40:42.708427 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf39e3e23_7dbe_49d6_9159_258cb947b761.slice/crio-afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f WatchSource:0}: Error finding container afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f: Status 404 returned error can't find the container with id afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.728918 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-957f-account-create-update-9hlk9"] Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.746514 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vd8lj"] Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.835508 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wlxkk"] Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.853160 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vs7fn"] Dec 08 21:40:42 crc kubenswrapper[4912]: I1208 21:40:42.965225 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-ed29-account-create-update-bgmtf"] Dec 08 21:40:42 crc kubenswrapper[4912]: W1208 21:40:42.976897 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85aef8d3_55b7_44b3_81db_c84293e8c5fd.slice/crio-55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39 WatchSource:0}: Error finding container 55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39: Status 404 returned error can't find the container with id 55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39 Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.557907 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vs7fn" event={"ID":"22da54af-1006-403d-b33a-ae71353ee4e6","Type":"ContainerStarted","Data":"ade9e87f69cd3478739995918433a61ac7117787660749bd756661451ee1bd3d"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.558952 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nftjn" event={"ID":"3d4fe027-c837-4bb3-b658-30c00d41ce24","Type":"ContainerStarted","Data":"c6d6e46d6e5b92add6b665e1dc856c21546d651ba2bb3c43f6629b60b35371fb"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.563061 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-957f-account-create-update-9hlk9" event={"ID":"545dd2e8-5f39-4673-b406-f42be1033a46","Type":"ContainerStarted","Data":"1728362c583209638b70a9b644027db1090af689cf56281745c8a5be0ed4a21b"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.563099 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-957f-account-create-update-9hlk9" event={"ID":"545dd2e8-5f39-4673-b406-f42be1033a46","Type":"ContainerStarted","Data":"c7ba97b506ceb11e5bdd1533baf452236a7531ad047ed39f92872cad35c55129"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.564711 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-db-create-vd8lj" event={"ID":"74214961-90de-45d4-8b70-a53db54e6a8c","Type":"ContainerStarted","Data":"bc9f7995cc9b298f6dda16ff398900d2f4ca33353c9e414f3a70a3c00e8f7811"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.564741 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vd8lj" event={"ID":"74214961-90de-45d4-8b70-a53db54e6a8c","Type":"ContainerStarted","Data":"cfc8f349fae7d409667537ad42ca918cbf8e66bae164819a81fc0cb046e0f8bf"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.566648 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fbd8-account-create-update-8cvpw" event={"ID":"f39e3e23-7dbe-49d6-9159-258cb947b761","Type":"ContainerStarted","Data":"7464cc8c599b7d767baa3b5effc54dd136cf4fcec755ca4de4939e5556bd1665"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.566707 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fbd8-account-create-update-8cvpw" event={"ID":"f39e3e23-7dbe-49d6-9159-258cb947b761","Type":"ContainerStarted","Data":"afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.568205 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ed29-account-create-update-bgmtf" event={"ID":"85aef8d3-55b7-44b3-81db-c84293e8c5fd","Type":"ContainerStarted","Data":"d95722f90a5682996e53dd3a24fc47ac1e9f9f6a2ceb792e31bae4626d68fd2a"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.568250 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ed29-account-create-update-bgmtf" event={"ID":"85aef8d3-55b7-44b3-81db-c84293e8c5fd","Type":"ContainerStarted","Data":"55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.570409 4912 generic.go:334] "Generic (PLEG): container finished" podID="b52032bd-e87f-4e3f-9502-847a57d802e4" containerID="12a1f277452a9245132795a91cc6458463dca71fc4d1dbf77f5fea7b513ca1b6" exitCode=0 Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.570485 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mr8rp" event={"ID":"b52032bd-e87f-4e3f-9502-847a57d802e4","Type":"ContainerDied","Data":"12a1f277452a9245132795a91cc6458463dca71fc4d1dbf77f5fea7b513ca1b6"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.572561 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wlxkk" event={"ID":"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf","Type":"ContainerStarted","Data":"62ea6c5140e63b84bfbe83ed0c1b329312ad81a300301341b186a345f421e954"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.572589 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wlxkk" event={"ID":"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf","Type":"ContainerStarted","Data":"1e776de794fb20005cc9e406268479585c13cb7db9aa61c4b17a333538be16b7"} Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.582084 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-nftjn" podStartSLOduration=6.562018506 podStartE2EDuration="21.582067515s" podCreationTimestamp="2025-12-08 21:40:22 +0000 UTC" firstStartedPulling="2025-12-08 21:40:27.023063757 +0000 UTC m=+1308.886065860" lastFinishedPulling="2025-12-08 21:40:42.043112776 +0000 UTC m=+1323.906114869" observedRunningTime="2025-12-08 21:40:43.575262539 +0000 UTC m=+1325.438264622" 
watchObservedRunningTime="2025-12-08 21:40:43.582067515 +0000 UTC m=+1325.445069598" Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.598996 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-fbd8-account-create-update-8cvpw" podStartSLOduration=5.598980754 podStartE2EDuration="5.598980754s" podCreationTimestamp="2025-12-08 21:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:43.594293917 +0000 UTC m=+1325.457296020" watchObservedRunningTime="2025-12-08 21:40:43.598980754 +0000 UTC m=+1325.461982837" Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.613977 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-wlxkk" podStartSLOduration=4.613954271 podStartE2EDuration="4.613954271s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:43.608825862 +0000 UTC m=+1325.471827945" watchObservedRunningTime="2025-12-08 21:40:43.613954271 +0000 UTC m=+1325.476956354" Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.626792 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-vd8lj" podStartSLOduration=4.62677226 podStartE2EDuration="4.62677226s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:43.626300477 +0000 UTC m=+1325.489302560" watchObservedRunningTime="2025-12-08 21:40:43.62677226 +0000 UTC m=+1325.489774343" Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.646791 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-957f-account-create-update-9hlk9" podStartSLOduration=4.646763483 podStartE2EDuration="4.646763483s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:43.639025843 +0000 UTC m=+1325.502027926" watchObservedRunningTime="2025-12-08 21:40:43.646763483 +0000 UTC m=+1325.509765566" Dec 08 21:40:43 crc kubenswrapper[4912]: I1208 21:40:43.657321 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-ed29-account-create-update-bgmtf" podStartSLOduration=4.65729955 podStartE2EDuration="4.65729955s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:43.654275188 +0000 UTC m=+1325.517277271" watchObservedRunningTime="2025-12-08 21:40:43.65729955 +0000 UTC m=+1325.520301633" Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.587821 4912 generic.go:334] "Generic (PLEG): container finished" podID="85aef8d3-55b7-44b3-81db-c84293e8c5fd" containerID="d95722f90a5682996e53dd3a24fc47ac1e9f9f6a2ceb792e31bae4626d68fd2a" exitCode=0 Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.587911 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ed29-account-create-update-bgmtf" event={"ID":"85aef8d3-55b7-44b3-81db-c84293e8c5fd","Type":"ContainerDied","Data":"d95722f90a5682996e53dd3a24fc47ac1e9f9f6a2ceb792e31bae4626d68fd2a"} Dec 08 21:40:44 
crc kubenswrapper[4912]: I1208 21:40:44.590817 4912 generic.go:334] "Generic (PLEG): container finished" podID="e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" containerID="62ea6c5140e63b84bfbe83ed0c1b329312ad81a300301341b186a345f421e954" exitCode=0 Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.590894 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wlxkk" event={"ID":"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf","Type":"ContainerDied","Data":"62ea6c5140e63b84bfbe83ed0c1b329312ad81a300301341b186a345f421e954"} Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.597019 4912 generic.go:334] "Generic (PLEG): container finished" podID="74214961-90de-45d4-8b70-a53db54e6a8c" containerID="bc9f7995cc9b298f6dda16ff398900d2f4ca33353c9e414f3a70a3c00e8f7811" exitCode=0 Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.597104 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vd8lj" event={"ID":"74214961-90de-45d4-8b70-a53db54e6a8c","Type":"ContainerDied","Data":"bc9f7995cc9b298f6dda16ff398900d2f4ca33353c9e414f3a70a3c00e8f7811"} Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.599381 4912 generic.go:334] "Generic (PLEG): container finished" podID="f39e3e23-7dbe-49d6-9159-258cb947b761" containerID="7464cc8c599b7d767baa3b5effc54dd136cf4fcec755ca4de4939e5556bd1665" exitCode=0 Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.599478 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fbd8-account-create-update-8cvpw" event={"ID":"f39e3e23-7dbe-49d6-9159-258cb947b761","Type":"ContainerDied","Data":"7464cc8c599b7d767baa3b5effc54dd136cf4fcec755ca4de4939e5556bd1665"} Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.604753 4912 generic.go:334] "Generic (PLEG): container finished" podID="545dd2e8-5f39-4673-b406-f42be1033a46" containerID="1728362c583209638b70a9b644027db1090af689cf56281745c8a5be0ed4a21b" exitCode=0 Dec 08 21:40:44 crc kubenswrapper[4912]: I1208 21:40:44.604846 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-957f-account-create-update-9hlk9" event={"ID":"545dd2e8-5f39-4673-b406-f42be1033a46","Type":"ContainerDied","Data":"1728362c583209638b70a9b644027db1090af689cf56281745c8a5be0ed4a21b"} Dec 08 21:40:46 crc kubenswrapper[4912]: I1208 21:40:46.855752 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:46 crc kubenswrapper[4912]: I1208 21:40:46.886600 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/928f7959-0e9f-4b2d-bfa2-2d970196f49f-etc-swift\") pod \"swift-storage-0\" (UID: \"928f7959-0e9f-4b2d-bfa2-2d970196f49f\") " pod="openstack/swift-storage-0" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.146144 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.537951 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.562332 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.585710 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.613959 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.617722 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.625747 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.645639 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wlxkk" Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.646096 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wlxkk" event={"ID":"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf","Type":"ContainerDied","Data":"1e776de794fb20005cc9e406268479585c13cb7db9aa61c4b17a333538be16b7"} Dec 08 21:40:47 crc kubenswrapper[4912]: I1208 21:40:47.646244 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e776de794fb20005cc9e406268479585c13cb7db9aa61c4b17a333538be16b7" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.668698 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-957f-account-create-update-9hlk9" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.668761 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-957f-account-create-update-9hlk9" event={"ID":"545dd2e8-5f39-4673-b406-f42be1033a46","Type":"ContainerDied","Data":"c7ba97b506ceb11e5bdd1533baf452236a7531ad047ed39f92872cad35c55129"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.668799 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7ba97b506ceb11e5bdd1533baf452236a7531ad047ed39f92872cad35c55129" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.671584 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vd8lj" event={"ID":"74214961-90de-45d4-8b70-a53db54e6a8c","Type":"ContainerDied","Data":"cfc8f349fae7d409667537ad42ca918cbf8e66bae164819a81fc0cb046e0f8bf"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.671610 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfc8f349fae7d409667537ad42ca918cbf8e66bae164819a81fc0cb046e0f8bf" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.671659 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vd8lj" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.675849 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fbd8-account-create-update-8cvpw" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.675956 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fbd8-account-create-update-8cvpw" event={"ID":"f39e3e23-7dbe-49d6-9159-258cb947b761","Type":"ContainerDied","Data":"afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.676585 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afd4b48ce339853f7556102008ab2467ffb9323c0649ca1a9efbc69eb37eb86f" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.679432 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ed29-account-create-update-bgmtf" event={"ID":"85aef8d3-55b7-44b3-81db-c84293e8c5fd","Type":"ContainerDied","Data":"55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.679474 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55163cbfc624f5c6ec043980fc873de1dc3b3e8b9bc3c8df53287887395fab39" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.679537 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ed29-account-create-update-bgmtf" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.689419 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mr8rp" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.689920 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mr8rp" event={"ID":"b52032bd-e87f-4e3f-9502-847a57d802e4","Type":"ContainerDied","Data":"21c9586434672a9bae31d9c8ec3663a69e0e47d148c6f8b3b159fa17fafa835e"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.689959 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21c9586434672a9bae31d9c8ec3663a69e0e47d148c6f8b3b159fa17fafa835e" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.703216 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58frt\" (UniqueName: \"kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt\") pod \"f39e3e23-7dbe-49d6-9159-258cb947b761\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.703288 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts\") pod \"b52032bd-e87f-4e3f-9502-847a57d802e4\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.703351 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts\") pod \"545dd2e8-5f39-4673-b406-f42be1033a46\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.703670 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts\") pod \"f39e3e23-7dbe-49d6-9159-258cb947b761\" (UID: \"f39e3e23-7dbe-49d6-9159-258cb947b761\") " Dec 08 21:40:48 crc 
kubenswrapper[4912]: I1208 21:40:47.703764 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqhj2\" (UniqueName: \"kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2\") pod \"b52032bd-e87f-4e3f-9502-847a57d802e4\" (UID: \"b52032bd-e87f-4e3f-9502-847a57d802e4\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.703809 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkkgd\" (UniqueName: \"kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd\") pod \"545dd2e8-5f39-4673-b406-f42be1033a46\" (UID: \"545dd2e8-5f39-4673-b406-f42be1033a46\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.704914 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b52032bd-e87f-4e3f-9502-847a57d802e4" (UID: "b52032bd-e87f-4e3f-9502-847a57d802e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.704917 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "545dd2e8-5f39-4673-b406-f42be1033a46" (UID: "545dd2e8-5f39-4673-b406-f42be1033a46"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.705298 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f39e3e23-7dbe-49d6-9159-258cb947b761" (UID: "f39e3e23-7dbe-49d6-9159-258cb947b761"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.706712 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f39e3e23-7dbe-49d6-9159-258cb947b761-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.706737 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b52032bd-e87f-4e3f-9502-847a57d802e4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.706748 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/545dd2e8-5f39-4673-b406-f42be1033a46-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.710126 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2" (OuterVolumeSpecName: "kube-api-access-rqhj2") pod "b52032bd-e87f-4e3f-9502-847a57d802e4" (UID: "b52032bd-e87f-4e3f-9502-847a57d802e4"). InnerVolumeSpecName "kube-api-access-rqhj2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.710289 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd" (OuterVolumeSpecName: "kube-api-access-gkkgd") pod "545dd2e8-5f39-4673-b406-f42be1033a46" (UID: "545dd2e8-5f39-4673-b406-f42be1033a46"). InnerVolumeSpecName "kube-api-access-gkkgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.710985 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-vs7fn" podStartSLOduration=4.198338802 podStartE2EDuration="8.710967604s" podCreationTimestamp="2025-12-08 21:40:39 +0000 UTC" firstStartedPulling="2025-12-08 21:40:42.886764932 +0000 UTC m=+1324.749767015" lastFinishedPulling="2025-12-08 21:40:47.399393734 +0000 UTC m=+1329.262395817" observedRunningTime="2025-12-08 21:40:47.701210529 +0000 UTC m=+1329.564212612" watchObservedRunningTime="2025-12-08 21:40:47.710967604 +0000 UTC m=+1329.573969687" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.715566 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt" (OuterVolumeSpecName: "kube-api-access-58frt") pod "f39e3e23-7dbe-49d6-9159-258cb947b761" (UID: "f39e3e23-7dbe-49d6-9159-258cb947b761"). InnerVolumeSpecName "kube-api-access-58frt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808257 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxdj5\" (UniqueName: \"kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5\") pod \"74214961-90de-45d4-8b70-a53db54e6a8c\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808315 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts\") pod \"74214961-90de-45d4-8b70-a53db54e6a8c\" (UID: \"74214961-90de-45d4-8b70-a53db54e6a8c\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808393 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7hsd\" (UniqueName: \"kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd\") pod \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808414 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts\") pod \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\" (UID: \"85aef8d3-55b7-44b3-81db-c84293e8c5fd\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808468 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts\") pod \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808508 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-tb52v\" (UniqueName: \"kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v\") pod \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\" (UID: \"e9227e7c-9a8f-4c43-a44b-29b113a7a8cf\") " Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.808920 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" (UID: "e9227e7c-9a8f-4c43-a44b-29b113a7a8cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809005 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85aef8d3-55b7-44b3-81db-c84293e8c5fd" (UID: "85aef8d3-55b7-44b3-81db-c84293e8c5fd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809080 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "74214961-90de-45d4-8b70-a53db54e6a8c" (UID: "74214961-90de-45d4-8b70-a53db54e6a8c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809837 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85aef8d3-55b7-44b3-81db-c84293e8c5fd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809867 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809877 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqhj2\" (UniqueName: \"kubernetes.io/projected/b52032bd-e87f-4e3f-9502-847a57d802e4-kube-api-access-rqhj2\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809891 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkkgd\" (UniqueName: \"kubernetes.io/projected/545dd2e8-5f39-4673-b406-f42be1033a46-kube-api-access-gkkgd\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809899 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58frt\" (UniqueName: \"kubernetes.io/projected/f39e3e23-7dbe-49d6-9159-258cb947b761-kube-api-access-58frt\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.809908 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74214961-90de-45d4-8b70-a53db54e6a8c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.812405 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd" (OuterVolumeSpecName: "kube-api-access-n7hsd") pod "85aef8d3-55b7-44b3-81db-c84293e8c5fd" 
(UID: "85aef8d3-55b7-44b3-81db-c84293e8c5fd"). InnerVolumeSpecName "kube-api-access-n7hsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.813136 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v" (OuterVolumeSpecName: "kube-api-access-tb52v") pod "e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" (UID: "e9227e7c-9a8f-4c43-a44b-29b113a7a8cf"). InnerVolumeSpecName "kube-api-access-tb52v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.813223 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5" (OuterVolumeSpecName: "kube-api-access-lxdj5") pod "74214961-90de-45d4-8b70-a53db54e6a8c" (UID: "74214961-90de-45d4-8b70-a53db54e6a8c"). InnerVolumeSpecName "kube-api-access-lxdj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.910790 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb52v\" (UniqueName: \"kubernetes.io/projected/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf-kube-api-access-tb52v\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.911240 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxdj5\" (UniqueName: \"kubernetes.io/projected/74214961-90de-45d4-8b70-a53db54e6a8c-kube-api-access-lxdj5\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.911254 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7hsd\" (UniqueName: \"kubernetes.io/projected/85aef8d3-55b7-44b3-81db-c84293e8c5fd-kube-api-access-n7hsd\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:47.979433 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:48.714552 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vs7fn" event={"ID":"22da54af-1006-403d-b33a-ae71353ee4e6","Type":"ContainerStarted","Data":"07f3265906a397d70be21b913f06ed222a4308b8eb3b4bf34931af3e2fdec612"} Dec 08 21:40:48 crc kubenswrapper[4912]: I1208 21:40:48.719275 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"3082babbeeff3190d1d59dfb804f97b317a9e6e50d272cfc667b30de8e8c815b"} Dec 08 21:40:49 crc kubenswrapper[4912]: I1208 21:40:49.735061 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"ab1267b4f441b8590eb236c49e79a3d3595c13fc8234414f8bfcf706343bad9b"} Dec 08 21:40:49 crc kubenswrapper[4912]: I1208 21:40:49.735919 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"4d2b29dc646d412e07d1e78b8cf852a285f77c456f257c80d9875cb8ae4ea8e6"} Dec 08 21:40:50 crc kubenswrapper[4912]: I1208 21:40:50.748578 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"fd5b52298cc2e90c80716aa3d77f01484e11df6c8a1649ba348d43ded3b0ae31"} Dec 08 21:40:50 crc kubenswrapper[4912]: I1208 21:40:50.748630 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"979d136b8e7a69a5f938df2531dd846931dc4027e7e482bd67d1e980f03a6c84"} Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.769496 4912 generic.go:334] "Generic (PLEG): container finished" podID="22da54af-1006-403d-b33a-ae71353ee4e6" containerID="07f3265906a397d70be21b913f06ed222a4308b8eb3b4bf34931af3e2fdec612" exitCode=0 Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.769599 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vs7fn" event={"ID":"22da54af-1006-403d-b33a-ae71353ee4e6","Type":"ContainerDied","Data":"07f3265906a397d70be21b913f06ed222a4308b8eb3b4bf34931af3e2fdec612"} Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.776809 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"ea4040c2c8b340c27814eb5bd95b1e598eb5d55482176e0b47347c4828514f0c"} Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.776866 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"650ab26deaa39a0862c4ec5752a271d121456304472ffff482f84148c4cf85d8"} Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.776877 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"8e7d4ad6c247cf2383a0134acb1868e37d052c3ca98a438d83d8ecf043553e52"} Dec 08 21:40:52 crc kubenswrapper[4912]: I1208 21:40:52.776885 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"51f4b51d9eb8cf5c18fd848eedf3822b092684c59d7516d311e41a8afbcb77e1"} Dec 08 21:40:53 crc kubenswrapper[4912]: I1208 21:40:53.787144 4912 generic.go:334] "Generic (PLEG): container finished" podID="3d4fe027-c837-4bb3-b658-30c00d41ce24" containerID="c6d6e46d6e5b92add6b665e1dc856c21546d651ba2bb3c43f6629b60b35371fb" exitCode=0 Dec 08 21:40:53 crc kubenswrapper[4912]: I1208 21:40:53.787228 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nftjn" event={"ID":"3d4fe027-c837-4bb3-b658-30c00d41ce24","Type":"ContainerDied","Data":"c6d6e46d6e5b92add6b665e1dc856c21546d651ba2bb3c43f6629b60b35371fb"} Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.096139 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.223247 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data\") pod \"22da54af-1006-403d-b33a-ae71353ee4e6\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.223438 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkvbm\" (UniqueName: \"kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm\") pod \"22da54af-1006-403d-b33a-ae71353ee4e6\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.224394 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle\") pod \"22da54af-1006-403d-b33a-ae71353ee4e6\" (UID: \"22da54af-1006-403d-b33a-ae71353ee4e6\") " Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.233162 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm" (OuterVolumeSpecName: "kube-api-access-hkvbm") pod "22da54af-1006-403d-b33a-ae71353ee4e6" (UID: "22da54af-1006-403d-b33a-ae71353ee4e6"). InnerVolumeSpecName "kube-api-access-hkvbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.248374 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22da54af-1006-403d-b33a-ae71353ee4e6" (UID: "22da54af-1006-403d-b33a-ae71353ee4e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.282158 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data" (OuterVolumeSpecName: "config-data") pod "22da54af-1006-403d-b33a-ae71353ee4e6" (UID: "22da54af-1006-403d-b33a-ae71353ee4e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.326777 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.326838 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkvbm\" (UniqueName: \"kubernetes.io/projected/22da54af-1006-403d-b33a-ae71353ee4e6-kube-api-access-hkvbm\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.326855 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22da54af-1006-403d-b33a-ae71353ee4e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.797892 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vs7fn" event={"ID":"22da54af-1006-403d-b33a-ae71353ee4e6","Type":"ContainerDied","Data":"ade9e87f69cd3478739995918433a61ac7117787660749bd756661451ee1bd3d"} Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.798205 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ade9e87f69cd3478739995918433a61ac7117787660749bd756661451ee1bd3d" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.797922 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vs7fn" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.814180 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"e9b154f541b248a5a2f370f911743305cd1f61a8255c1163e71b228756ce26ec"} Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.814230 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"186d6293ef254fac9da0f872b8b344599b028c9464e53cb07e0df524beaf6784"} Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.814248 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"434e77e9d546d836e1b98cf2e95129e0f5b243c8def06ea5c1429b7172ff9844"} Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.981858 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"] Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982555 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74214961-90de-45d4-8b70-a53db54e6a8c" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982574 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="74214961-90de-45d4-8b70-a53db54e6a8c" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982589 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2fd975a-48c8-42a1-a81d-869c32e97dc8" containerName="swift-ring-rebalance" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982599 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2fd975a-48c8-42a1-a81d-869c32e97dc8" containerName="swift-ring-rebalance" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982617 4912 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="22da54af-1006-403d-b33a-ae71353ee4e6" containerName="keystone-db-sync" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982625 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="22da54af-1006-403d-b33a-ae71353ee4e6" containerName="keystone-db-sync" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982656 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39e3e23-7dbe-49d6-9159-258cb947b761" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982664 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39e3e23-7dbe-49d6-9159-258cb947b761" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982680 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85aef8d3-55b7-44b3-81db-c84293e8c5fd" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982688 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="85aef8d3-55b7-44b3-81db-c84293e8c5fd" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982699 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982709 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982728 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="545dd2e8-5f39-4673-b406-f42be1033a46" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982739 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="545dd2e8-5f39-4673-b406-f42be1033a46" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: E1208 21:40:54.982771 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52032bd-e87f-4e3f-9502-847a57d802e4" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982781 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52032bd-e87f-4e3f-9502-847a57d802e4" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.982989 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="85aef8d3-55b7-44b3-81db-c84293e8c5fd" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983009 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="74214961-90de-45d4-8b70-a53db54e6a8c" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983023 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="b52032bd-e87f-4e3f-9502-847a57d802e4" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983051 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" containerName="mariadb-database-create" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983068 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="22da54af-1006-403d-b33a-ae71353ee4e6" containerName="keystone-db-sync" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983090 4912 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f39e3e23-7dbe-49d6-9159-258cb947b761" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983101 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="545dd2e8-5f39-4673-b406-f42be1033a46" containerName="mariadb-account-create-update" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.983112 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2fd975a-48c8-42a1-a81d-869c32e97dc8" containerName="swift-ring-rebalance" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.984427 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.991305 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7qx69"] Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.992823 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.997134 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.997389 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.997511 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.997745 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-c727c" Dec 08 21:40:54 crc kubenswrapper[4912]: I1208 21:40:54.997873 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.009901 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.022147 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7qx69"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.042558 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.042645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.042687 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.042723 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n98h4\" (UniqueName: \"kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.042786 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043021 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043080 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043293 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043391 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043427 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.043488 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbbp9\" (UniqueName: \"kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145233 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145294 4912 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145316 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145333 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbbp9\" (UniqueName: \"kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145364 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145381 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145397 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145413 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n98h4\" (UniqueName: \"kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145434 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145488 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.145506 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.146487 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.146524 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.147395 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.147512 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.157769 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.158325 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.160015 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.162761 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.168020 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc 
kubenswrapper[4912]: I1208 21:40:55.169482 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n98h4\" (UniqueName: \"kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4\") pod \"keystone-bootstrap-7qx69\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") " pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.175731 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbbp9\" (UniqueName: \"kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9\") pod \"dnsmasq-dns-f877ddd87-9d475\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.261709 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.262367 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.297074 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4xrcd"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.298294 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.302151 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-56lcr" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.302232 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.302342 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.324332 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4xrcd"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.351743 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7qx69" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.359254 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.360818 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.390525 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-djggl"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.391718 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.393496 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.393643 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.400679 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jqk94" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.420253 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-7hldc"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.421668 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.426879 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.427121 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.429895 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zv9zp" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.440415 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454823 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454869 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454890 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454914 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454939 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qpqm\" (UniqueName: \"kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc 
kubenswrapper[4912]: I1208 21:40:55.454957 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.454984 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcmp7\" (UniqueName: \"kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455001 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455016 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455066 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455092 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455122 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455146 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455166 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc 
kubenswrapper[4912]: I1208 21:40:55.455184 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455200 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455232 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq6wh\" (UniqueName: \"kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455246 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.455271 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hts7w\" (UniqueName: \"kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.460653 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-djggl"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.497446 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7hldc"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.508555 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.527599 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-krnh9"] Dec 08 21:40:55 crc kubenswrapper[4912]: E1208 21:40:55.528242 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4fe027-c837-4bb3-b658-30c00d41ce24" containerName="glance-db-sync" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.528255 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4fe027-c837-4bb3-b658-30c00d41ce24" containerName="glance-db-sync" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.528428 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d4fe027-c837-4bb3-b658-30c00d41ce24" containerName="glance-db-sync" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.532153 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.542223 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-krnh9"] Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.554690 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-txdnh" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.554934 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.605335 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6zhm\" (UniqueName: \"kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm\") pod \"3d4fe027-c837-4bb3-b658-30c00d41ce24\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.605435 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data\") pod \"3d4fe027-c837-4bb3-b658-30c00d41ce24\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.605614 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle\") pod \"3d4fe027-c837-4bb3-b658-30c00d41ce24\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.605723 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data\") pod \"3d4fe027-c837-4bb3-b658-30c00d41ce24\" (UID: \"3d4fe027-c837-4bb3-b658-30c00d41ce24\") " Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606349 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606412 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606449 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606481 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606502 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606547 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqrl7\" (UniqueName: \"kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606596 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq6wh\" (UniqueName: \"kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606621 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606666 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hts7w\" (UniqueName: \"kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606694 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606739 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606771 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606812 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606858 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qpqm\" (UniqueName: 
\"kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606893 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606915 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606961 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcmp7\" (UniqueName: \"kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606979 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.606998 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.607025 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.607076 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.607126 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.607241 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id\") pod 
\"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.614743 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.616954 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3d4fe027-c837-4bb3-b658-30c00d41ce24" (UID: "3d4fe027-c837-4bb3-b658-30c00d41ce24"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.631362 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.631655 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.631847 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.632738 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.635706 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.635876 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.636696 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.649306 
4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.656459 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.676628 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.676802 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qpqm\" (UniqueName: \"kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.677059 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.677117 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq6wh\" (UniqueName: \"kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh\") pod \"dnsmasq-dns-68dcc9cf6f-bsx5q\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.678575 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data\") pod \"cinder-db-sync-djggl\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.679468 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm" (OuterVolumeSpecName: "kube-api-access-k6zhm") pod "3d4fe027-c837-4bb3-b658-30c00d41ce24" (UID: "3d4fe027-c837-4bb3-b658-30c00d41ce24"). InnerVolumeSpecName "kube-api-access-k6zhm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.687026 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hts7w\" (UniqueName: \"kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.688963 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcmp7\" (UniqueName: \"kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7\") pod \"placement-db-sync-7hldc\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") " pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.689104 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle\") pod \"neutron-db-sync-4xrcd\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.709361 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.709419 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.709495 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqrl7\" (UniqueName: \"kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.709567 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6zhm\" (UniqueName: \"kubernetes.io/projected/3d4fe027-c837-4bb3-b658-30c00d41ce24-kube-api-access-k6zhm\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.709577 4912 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.715254 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.718184 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data\") pod 
\"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.724352 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data" (OuterVolumeSpecName: "config-data") pod "3d4fe027-c837-4bb3-b658-30c00d41ce24" (UID: "3d4fe027-c837-4bb3-b658-30c00d41ce24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.727347 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.733615 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqrl7\" (UniqueName: \"kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7\") pod \"barbican-db-sync-krnh9\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") " pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.752665 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.777238 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d4fe027-c837-4bb3-b658-30c00d41ce24" (UID: "3d4fe027-c837-4bb3-b658-30c00d41ce24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.785487 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-djggl" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.792970 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-7hldc" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.811219 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.811254 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4fe027-c837-4bb3-b658-30c00d41ce24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.857686 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"12f0dad5742550248efe71a0918d00d8d222c94dfbe9352ac3009454c0f390e4"} Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.857996 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"e2f0d4f433479fab20dc43c523a1ac5f3e2f1f603dc05ab212faf8e7d6798a19"} Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.872960 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nftjn" event={"ID":"3d4fe027-c837-4bb3-b658-30c00d41ce24","Type":"ContainerDied","Data":"ff83ac72d3b3b3a54dcf9bdbb270e8a4fe497ff1e3278dfbd388a11a6ca67441"} Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.872998 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff83ac72d3b3b3a54dcf9bdbb270e8a4fe497ff1e3278dfbd388a11a6ca67441" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.873170 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nftjn" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.881017 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-krnh9" Dec 08 21:40:55 crc kubenswrapper[4912]: I1208 21:40:55.983553 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.158502 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7qx69"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.289217 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.326123 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.327845 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.368560 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.432520 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6rxd\" (UniqueName: \"kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.432987 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.433081 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.433114 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.433261 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.534617 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.534674 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.534778 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.534840 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6rxd\" 
(UniqueName: \"kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.534914 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.535764 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.535791 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.536447 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.536475 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.558658 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6rxd\" (UniqueName: \"kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd\") pod \"dnsmasq-dns-f84976bdf-w8bpw\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") " pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.619157 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4xrcd"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.677517 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.825836 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.836353 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-djggl"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.910582 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-krnh9"] Dec 08 21:40:56 crc kubenswrapper[4912]: W1208 21:40:56.913269 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29694038_be1b_4d16_95ce_16c516b0f8bf.slice/crio-f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde WatchSource:0}: Error finding container f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde: Status 404 returned error can't find the container with id f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.918082 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7hldc"] Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.920322 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4xrcd" event={"ID":"f4574c96-aa92-4621-92e2-d8ee041d94c8","Type":"ContainerStarted","Data":"5a3b569acec4fd654fffe9929087b149fdd599d1e62bca7939e9daf8982bac01"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.965136 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"04c53d2fa8ddfce92be2f67733e876dee0919acba122e65a066a4effc7bb41a1"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.976622 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-djggl" event={"ID":"06e69c2b-f54a-466a-9f5c-60499b4f5123","Type":"ContainerStarted","Data":"eb292047f3bf2c6ab58cff4b1f24da96c4054728782358a94fc6d8ce00a7fcfa"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.979086 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7qx69" event={"ID":"8a585189-dc76-4a0c-9b77-f5abbeb2d88b","Type":"ContainerStarted","Data":"306d4c646b69b4e0dc3a92c5f59d022bbc7639f39f2cbc9b0c37ba191d22cd7b"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.979133 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7qx69" event={"ID":"8a585189-dc76-4a0c-9b77-f5abbeb2d88b","Type":"ContainerStarted","Data":"d1d927984219fb611d9a93650b429b7f50c9fb52ab20bfb1549db7ebb51cded3"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.983190 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" event={"ID":"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04","Type":"ContainerStarted","Data":"d2b14a7b0b99511808d6628edb955f1aad1279e64d92eaf83250881869ffca22"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.985957 4912 generic.go:334] "Generic (PLEG): container finished" podID="a562a39e-3f55-4bc4-8fcd-4e3397110e99" containerID="104fc2b4ad859edd633f65f17070bf0da24f19de22d71ce884b1e0c0bb280e19" exitCode=0 Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.986005 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-9d475" 
event={"ID":"a562a39e-3f55-4bc4-8fcd-4e3397110e99","Type":"ContainerDied","Data":"104fc2b4ad859edd633f65f17070bf0da24f19de22d71ce884b1e0c0bb280e19"} Dec 08 21:40:56 crc kubenswrapper[4912]: I1208 21:40:56.986161 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-9d475" event={"ID":"a562a39e-3f55-4bc4-8fcd-4e3397110e99","Type":"ContainerStarted","Data":"ffee3b18aec9636922b36c903fa9585b8a6515ddc4be71e34659035ee0096412"} Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.238172 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.249122 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.249505 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.252256 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.252407 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.252519 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-p7sjg" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.357321 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"] Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.380962 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl897\" (UniqueName: \"kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381017 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381074 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381147 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381185 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381259 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.381299 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.412512 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-9d475" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.436279 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:40:57 crc kubenswrapper[4912]: E1208 21:40:57.436779 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a562a39e-3f55-4bc4-8fcd-4e3397110e99" containerName="init" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.436792 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a562a39e-3f55-4bc4-8fcd-4e3397110e99" containerName="init" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.436968 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a562a39e-3f55-4bc4-8fcd-4e3397110e99" containerName="init" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.438061 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.445413 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.449575 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.485653 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486366 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486418 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl897\" (UniqueName: \"kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486451 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486598 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486648 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.486689 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.487998 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc 
kubenswrapper[4912]: I1208 21:40:57.503269 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.505988 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.506036 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f04dae7735d5049d0d88a291850a74e98319e9b887dd9c8e9e7d7d4d3762e2c6/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.506671 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:40:57 crc kubenswrapper[4912]: E1208 21:40:57.521794 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance kube-api-access-zl897 scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-external-api-0" podUID="127633c9-36c1-4593-a46c-fea139d969f8" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.537403 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.539674 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.549548 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.555798 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl897\" (UniqueName: \"kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.590117 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb\") pod \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\" (UID: 
\"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.590673 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb\") pod \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.590720 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config\") pod \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.590770 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc\") pod \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.590800 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbbp9\" (UniqueName: \"kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9\") pod \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\" (UID: \"a562a39e-3f55-4bc4-8fcd-4e3397110e99\") " Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.593618 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.593702 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.593738 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.597247 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.597422 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldzq4\" (UniqueName: \"kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 
crc kubenswrapper[4912]: I1208 21:40:57.597493 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.597672 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.605585 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9" (OuterVolumeSpecName: "kube-api-access-xbbp9") pod "a562a39e-3f55-4bc4-8fcd-4e3397110e99" (UID: "a562a39e-3f55-4bc4-8fcd-4e3397110e99"). InnerVolumeSpecName "kube-api-access-xbbp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.614274 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") " pod="openstack/glance-default-external-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.619291 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a562a39e-3f55-4bc4-8fcd-4e3397110e99" (UID: "a562a39e-3f55-4bc4-8fcd-4e3397110e99"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.619927 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a562a39e-3f55-4bc4-8fcd-4e3397110e99" (UID: "a562a39e-3f55-4bc4-8fcd-4e3397110e99"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.651251 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config" (OuterVolumeSpecName: "config") pod "a562a39e-3f55-4bc4-8fcd-4e3397110e99" (UID: "a562a39e-3f55-4bc4-8fcd-4e3397110e99"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.694344 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a562a39e-3f55-4bc4-8fcd-4e3397110e99" (UID: "a562a39e-3f55-4bc4-8fcd-4e3397110e99"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701223 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701289 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701319 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701346 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701397 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldzq4\" (UniqueName: \"kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701424 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701488 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701595 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbbp9\" (UniqueName: \"kubernetes.io/projected/a562a39e-3f55-4bc4-8fcd-4e3397110e99-kube-api-access-xbbp9\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701609 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701619 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701631 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.701640 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a562a39e-3f55-4bc4-8fcd-4e3397110e99-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.705525 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.708347 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.709264 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.711526 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.712173 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.714652 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.714698 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f170b0bc0d5e657f5d6976432df9f5b93559f3f6e739297a830b8d1908e7cee/globalmount\"" pod="openstack/glance-default-internal-api-0"
Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.739655 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldzq4\" (UniqueName: \"kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.753531 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:40:57 crc kubenswrapper[4912]: I1208 21:40:57.826912 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.003784 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-9d475" event={"ID":"a562a39e-3f55-4bc4-8fcd-4e3397110e99","Type":"ContainerDied","Data":"ffee3b18aec9636922b36c903fa9585b8a6515ddc4be71e34659035ee0096412"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.004108 4912 scope.go:117] "RemoveContainer" containerID="104fc2b4ad859edd633f65f17070bf0da24f19de22d71ce884b1e0c0bb280e19"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.004238 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-9d475"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.021499 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4xrcd" event={"ID":"f4574c96-aa92-4621-92e2-d8ee041d94c8","Type":"ContainerStarted","Data":"34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.060285 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4xrcd" podStartSLOduration=3.060264733 podStartE2EDuration="3.060264733s" podCreationTimestamp="2025-12-08 21:40:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:58.049367887 +0000 UTC m=+1339.912369970" watchObservedRunningTime="2025-12-08 21:40:58.060264733 +0000 UTC m=+1339.923266816"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.106834 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"928f7959-0e9f-4b2d-bfa2-2d970196f49f","Type":"ContainerStarted","Data":"7ff6b78a07630ef13268796ef79feb53e18ea8a47b1b616a961de7f184056d5e"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.123843 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"]
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.131633 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-9d475"]
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.136423 4912 generic.go:334] "Generic (PLEG): container finished" podID="29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" containerID="da5739eb121e55d508a8303b272da34626874df282c3dfbeb4cc835b2a5fecb4" exitCode=0
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.136527 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" event={"ID":"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04","Type":"ContainerDied","Data":"da5739eb121e55d508a8303b272da34626874df282c3dfbeb4cc835b2a5fecb4"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.161552 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=39.483493202 podStartE2EDuration="45.161525396s" podCreationTimestamp="2025-12-08 21:40:13 +0000 UTC" firstStartedPulling="2025-12-08 21:40:47.99330693 +0000 UTC m=+1329.856309013" lastFinishedPulling="2025-12-08 21:40:53.671339124 +0000 UTC m=+1335.534341207" observedRunningTime="2025-12-08 21:40:58.156646303 +0000 UTC m=+1340.019648386" watchObservedRunningTime="2025-12-08 21:40:58.161525396 +0000 UTC m=+1340.024527479"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.183392 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7hldc" event={"ID":"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a","Type":"ContainerStarted","Data":"4c69dbe1999e3500b7a759fd8ea11e893ac165ec23dd6472ecebeedbbd883f2f"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.210374 4912 generic.go:334] "Generic (PLEG): container finished" podID="02f813e2-0749-4dee-b272-b21512eef31a" containerID="cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114" exitCode=0
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.210798 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" event={"ID":"02f813e2-0749-4dee-b272-b21512eef31a","Type":"ContainerDied","Data":"cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.210831 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" event={"ID":"02f813e2-0749-4dee-b272-b21512eef31a","Type":"ContainerStarted","Data":"6bf67c3a8a039d9928193b959aadd03dfb3fe2f34ef365f39706e2fdcc2690f2"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.227435 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-krnh9" event={"ID":"29694038-be1b-4d16-95ce-16c516b0f8bf","Type":"ContainerStarted","Data":"f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde"}
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.227503 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.326166 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7qx69" podStartSLOduration=4.326143031 podStartE2EDuration="4.326143031s" podCreationTimestamp="2025-12-08 21:40:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:58.310071104 +0000 UTC m=+1340.173073197" watchObservedRunningTime="2025-12-08 21:40:58.326143031 +0000 UTC m=+1340.189145114"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.336197 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.424746 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.424789 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.424842 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.424905 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.424947 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.425069 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl897\" (UniqueName: \"kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.425102 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs\") pod \"127633c9-36c1-4593-a46c-fea139d969f8\" (UID: \"127633c9-36c1-4593-a46c-fea139d969f8\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.433986 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.434020 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs" (OuterVolumeSpecName: "logs") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.438688 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data" (OuterVolumeSpecName: "config-data") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.439098 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts" (OuterVolumeSpecName: "scripts") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.450296 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.455950 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897" (OuterVolumeSpecName: "kube-api-access-zl897") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "kube-api-access-zl897". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.487011 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a562a39e-3f55-4bc4-8fcd-4e3397110e99" path="/var/lib/kubelet/pods/a562a39e-3f55-4bc4-8fcd-4e3397110e99/volumes"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532631 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532942 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532956 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532967 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127633c9-36c1-4593-a46c-fea139d969f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532980 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl897\" (UniqueName: \"kubernetes.io/projected/127633c9-36c1-4593-a46c-fea139d969f8-kube-api-access-zl897\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.532992 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127633c9-36c1-4593-a46c-fea139d969f8-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.552116 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"]
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.586944 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.610108 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"]
Dec 08 21:40:58 crc kubenswrapper[4912]: E1208 21:40:58.610692 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" containerName="init"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.610708 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" containerName="init"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.610986 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" containerName="init"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.612335 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.618690 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.634788 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc\") pod \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.634904 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb\") pod \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.634958 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb\") pod \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.634998 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config\") pod \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635277 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq6wh\" (UniqueName: \"kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh\") pod \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\" (UID: \"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04\") "
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635596 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635656 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635710 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh7dw\" (UniqueName: \"kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635776 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635835 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.635877 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.687712 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh" (OuterVolumeSpecName: "kube-api-access-rq6wh") pod "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" (UID: "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04"). InnerVolumeSpecName "kube-api-access-rq6wh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.695738 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"]
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.710505 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" (UID: "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.759209 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config" (OuterVolumeSpecName: "config") pod "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" (UID: "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.760502 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.760613 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.760745 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.760824 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.760931 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh7dw\" (UniqueName: \"kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.762405 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.762804 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763021 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763161 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763484 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763490 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763515 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763527 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq6wh\" (UniqueName: \"kubernetes.io/projected/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-kube-api-access-rq6wh\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.763996 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" (UID: "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.764180 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.810298 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" (UID: "29b0c15a-bb73-455f-a6b9-f0f4ba7cee04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.829100 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh7dw\" (UniqueName: \"kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw\") pod \"dnsmasq-dns-785d8bcb8c-4qv76\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.871255 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.871293 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.899519 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.902466 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (OuterVolumeSpecName: "glance") pod "127633c9-36c1-4593-a46c-fea139d969f8" (UID: "127633c9-36c1-4593-a46c-fea139d969f8"). InnerVolumeSpecName "pvc-f73b914b-b926-44ad-a1ae-1553428892b0". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 08 21:40:58 crc kubenswrapper[4912]: W1208 21:40:58.907389 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40102b0f_259c_4423_8934_6ab1397f2aa6.slice/crio-84971522fe57d673aaf8078e6bbe4cc582ccf9ddfb341835cee13de37b32e74c WatchSource:0}: Error finding container 84971522fe57d673aaf8078e6bbe4cc582ccf9ddfb341835cee13de37b32e74c: Status 404 returned error can't find the container with id 84971522fe57d673aaf8078e6bbe4cc582ccf9ddfb341835cee13de37b32e74c
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.956733 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:40:58 crc kubenswrapper[4912]: I1208 21:40:58.978174 4912 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" "
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.028972 4912 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.029260 4912 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0") on node "crc"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.084583 4912 reconciler_common.go:293] "Volume detached for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" DevicePath \"\""
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.250738 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerStarted","Data":"84971522fe57d673aaf8078e6bbe4cc582ccf9ddfb341835cee13de37b32e74c"}
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.255758 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q" event={"ID":"29b0c15a-bb73-455f-a6b9-f0f4ba7cee04","Type":"ContainerDied","Data":"d2b14a7b0b99511808d6628edb955f1aad1279e64d92eaf83250881869ffca22"}
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.255820 4912 scope.go:117] "RemoveContainer" containerID="da5739eb121e55d508a8303b272da34626874df282c3dfbeb4cc835b2a5fecb4"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.255833 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.260617 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" event={"ID":"02f813e2-0749-4dee-b272-b21512eef31a","Type":"ContainerStarted","Data":"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"}
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.260856 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.260925 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="dnsmasq-dns" containerID="cri-o://c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083" gracePeriod=10
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.285614 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" podStartSLOduration=3.285593585 podStartE2EDuration="3.285593585s" podCreationTimestamp="2025-12-08 21:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:40:59.281540315 +0000 UTC m=+1341.144542408" watchObservedRunningTime="2025-12-08 21:40:59.285593585 +0000 UTC m=+1341.148595668"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.335365 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.351318 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.373121 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.375331 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.380553 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.392962 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.431473 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.464092 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-bsx5q"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.492911 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.492975 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.493004 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.493058 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q24rz\" (UniqueName: \"kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.493145 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.493173 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.493204 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.500672 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"]
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.595922 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596217 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596326 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596401 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596510 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q24rz\" (UniqueName: \"kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596636 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.596704 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.598965 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.602714 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.603582 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.604553 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.605621 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.607293 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.607323 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f04dae7735d5049d0d88a291850a74e98319e9b887dd9c8e9e7d7d4d3762e2c6/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.623333 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q24rz\" (UniqueName: \"kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.655503 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:40:59 crc kubenswrapper[4912]: I1208 21:40:59.710907 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.028657 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.209994 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc\") pod \"02f813e2-0749-4dee-b272-b21512eef31a\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") "
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.212394 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb\") pod \"02f813e2-0749-4dee-b272-b21512eef31a\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") "
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.212436 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb\") pod \"02f813e2-0749-4dee-b272-b21512eef31a\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") "
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.212461 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config\") pod \"02f813e2-0749-4dee-b272-b21512eef31a\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") "
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.212554 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6rxd\" (UniqueName: \"kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd\") pod \"02f813e2-0749-4dee-b272-b21512eef31a\" (UID: \"02f813e2-0749-4dee-b272-b21512eef31a\") "
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.218099 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd" (OuterVolumeSpecName: "kube-api-access-p6rxd") pod "02f813e2-0749-4dee-b272-b21512eef31a" (UID: "02f813e2-0749-4dee-b272-b21512eef31a"). InnerVolumeSpecName "kube-api-access-p6rxd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.259199 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "02f813e2-0749-4dee-b272-b21512eef31a" (UID: "02f813e2-0749-4dee-b272-b21512eef31a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.280211 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerStarted","Data":"a1a12b4dd4803939f9ceb06f2e0379f7df229dddc20cf33b4f05b7f2807644de"}
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.280775 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "02f813e2-0749-4dee-b272-b21512eef31a" (UID: "02f813e2-0749-4dee-b272-b21512eef31a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.281304 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config" (OuterVolumeSpecName: "config") pod "02f813e2-0749-4dee-b272-b21512eef31a" (UID: "02f813e2-0749-4dee-b272-b21512eef31a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.285820 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "02f813e2-0749-4dee-b272-b21512eef31a" (UID: "02f813e2-0749-4dee-b272-b21512eef31a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.288344 4912 generic.go:334] "Generic (PLEG): container finished" podID="02f813e2-0749-4dee-b272-b21512eef31a" containerID="c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083" exitCode=0
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.288434 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" event={"ID":"02f813e2-0749-4dee-b272-b21512eef31a","Type":"ContainerDied","Data":"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"}
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.288468 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw" event={"ID":"02f813e2-0749-4dee-b272-b21512eef31a","Type":"ContainerDied","Data":"6bf67c3a8a039d9928193b959aadd03dfb3fe2f34ef365f39706e2fdcc2690f2"}
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.288489 4912 scope.go:117] "RemoveContainer" containerID="c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.288620 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-w8bpw"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.293965 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" event={"ID":"d69c8959-7562-415d-bd31-6f8ed45750be","Type":"ContainerDied","Data":"4cea9222afba6a0b0bd1b4a51d769eb1aae06c76168ebc3e200ff8b3951fa8d9"}
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.293808 4912 generic.go:334] "Generic (PLEG): container finished" podID="d69c8959-7562-415d-bd31-6f8ed45750be" containerID="4cea9222afba6a0b0bd1b4a51d769eb1aae06c76168ebc3e200ff8b3951fa8d9" exitCode=0
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.298005 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" event={"ID":"d69c8959-7562-415d-bd31-6f8ed45750be","Type":"ContainerStarted","Data":"30bf3b30da98cde711862e6e06d6f0dfb1a2ea13de4750074e71cd3273947595"}
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.316841 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6rxd\" (UniqueName: \"kubernetes.io/projected/02f813e2-0749-4dee-b272-b21512eef31a-kube-api-access-p6rxd\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.316883 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.316897 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.316907 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.316917 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02f813e2-0749-4dee-b272-b21512eef31a-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.328870 4912 scope.go:117] "RemoveContainer" containerID="cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.364855 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"]
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.384366 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-w8bpw"]
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.434187 4912 scope.go:117] "RemoveContainer" containerID="c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"
Dec 08 21:41:00 crc kubenswrapper[4912]: E1208 21:41:00.434936 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083\": container with ID starting with c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083 not found: ID does not exist" containerID="c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.435097 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083"} err="failed to get container status \"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083\": rpc error: code = NotFound desc = could not find container \"c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083\": container with ID starting with c9be51088a487f76ea821b9e594b5e1e19e3a793104f5e6827d1a9bc94e37083 not found: ID does not exist"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.435416 4912 scope.go:117] "RemoveContainer" containerID="cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114"
Dec 08 21:41:00 crc kubenswrapper[4912]: E1208 21:41:00.436341 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114\": container with ID starting with cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114 not found: ID does not exist" containerID="cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.436560 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114"} err="failed to get container status \"cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114\": rpc error: code = NotFound desc = could not find container \"cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114\": container with ID starting with cc95e2c918bc99ce57678b429b796d0af009f7035fccbc3fe8b58e6ccd921114 not found: ID does not exist"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.464522 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02f813e2-0749-4dee-b272-b21512eef31a" path="/var/lib/kubelet/pods/02f813e2-0749-4dee-b272-b21512eef31a/volumes"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.465913 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="127633c9-36c1-4593-a46c-fea139d969f8" path="/var/lib/kubelet/pods/127633c9-36c1-4593-a46c-fea139d969f8/volumes"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.466520 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b0c15a-bb73-455f-a6b9-f0f4ba7cee04" path="/var/lib/kubelet/pods/29b0c15a-bb73-455f-a6b9-f0f4ba7cee04/volumes"
Dec 08 21:41:00 crc kubenswrapper[4912]: I1208 21:41:00.499926 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:41:00 crc kubenswrapper[4912]: W1208 21:41:00.516582 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe88b255_f61e_4cf2_a795_14aecb94f665.slice/crio-2a75af7c0d4ed1f7b80180cb3294b9d2b078f5498f48dc5b3b3b0808fb3f762b WatchSource:0}: Error finding container 2a75af7c0d4ed1f7b80180cb3294b9d2b078f5498f48dc5b3b3b0808fb3f762b: Status 404 returned error can't find the container with id 2a75af7c0d4ed1f7b80180cb3294b9d2b078f5498f48dc5b3b3b0808fb3f762b
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.310331 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerStarted","Data":"2a75af7c0d4ed1f7b80180cb3294b9d2b078f5498f48dc5b3b3b0808fb3f762b"}
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.324117 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" event={"ID":"d69c8959-7562-415d-bd31-6f8ed45750be","Type":"ContainerStarted","Data":"74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31"}
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.324218 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.328051 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerStarted","Data":"b0e0baed26f16b97f0843acce67bcbba0a6e74b12ace6197dba6c1189c5d159e"}
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.351812 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" podStartSLOduration=3.351791487 podStartE2EDuration="3.351791487s" podCreationTimestamp="2025-12-08 21:40:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:01.347984594 +0000 UTC m=+1343.210986687" watchObservedRunningTime="2025-12-08 21:41:01.351791487 +0000 UTC m=+1343.214793580"
Dec 08 21:41:01 crc kubenswrapper[4912]: I1208 21:41:01.382623 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.382603455 podStartE2EDuration="4.382603455s" podCreationTimestamp="2025-12-08 21:40:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:01.377025863 +0000 UTC m=+1343.240027946" watchObservedRunningTime="2025-12-08 21:41:01.382603455 +0000 UTC m=+1343.245605538"
Dec 08 21:41:02 crc kubenswrapper[4912]: I1208 21:41:02.344652 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerStarted","Data":"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c"}
Dec 08 21:41:03 crc kubenswrapper[4912]: I1208 21:41:03.355262 4912 generic.go:334] "Generic (PLEG): container finished" podID="8a585189-dc76-4a0c-9b77-f5abbeb2d88b" containerID="306d4c646b69b4e0dc3a92c5f59d022bbc7639f39f2cbc9b0c37ba191d22cd7b" exitCode=0
Dec 08 21:41:03 crc kubenswrapper[4912]: I1208 21:41:03.355323 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7qx69" event={"ID":"8a585189-dc76-4a0c-9b77-f5abbeb2d88b","Type":"ContainerDied","Data":"306d4c646b69b4e0dc3a92c5f59d022bbc7639f39f2cbc9b0c37ba191d22cd7b"}
Dec 08 21:41:05 crc kubenswrapper[4912]: I1208 21:41:05.312571 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:41:05 crc kubenswrapper[4912]: I1208 21:41:05.380783 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:41:05 crc kubenswrapper[4912]: I1208 21:41:05.381257 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-log" containerID="cri-o://a1a12b4dd4803939f9ceb06f2e0379f7df229dddc20cf33b4f05b7f2807644de" gracePeriod=30
Dec 08 21:41:05 crc kubenswrapper[4912]: I1208 21:41:05.381493 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-httpd" containerID="cri-o://b0e0baed26f16b97f0843acce67bcbba0a6e74b12ace6197dba6c1189c5d159e" gracePeriod=30
Dec 08 21:41:06 crc kubenswrapper[4912]: I1208 21:41:06.398990 4912 generic.go:334] "Generic (PLEG): container finished" podID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerID="b0e0baed26f16b97f0843acce67bcbba0a6e74b12ace6197dba6c1189c5d159e" exitCode=0
Dec 08 21:41:06 crc kubenswrapper[4912]: I1208 21:41:06.399288 4912 generic.go:334] "Generic (PLEG): container finished" podID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerID="a1a12b4dd4803939f9ceb06f2e0379f7df229dddc20cf33b4f05b7f2807644de" exitCode=143
Dec 08 21:41:06 crc kubenswrapper[4912]: I1208 21:41:06.399075 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerDied","Data":"b0e0baed26f16b97f0843acce67bcbba0a6e74b12ace6197dba6c1189c5d159e"}
Dec 08 21:41:06 crc kubenswrapper[4912]: I1208 21:41:06.399324 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerDied","Data":"a1a12b4dd4803939f9ceb06f2e0379f7df229dddc20cf33b4f05b7f2807644de"}
Dec 08 21:41:08 crc kubenswrapper[4912]: I1208 21:41:08.958241 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76"
Dec 08 21:41:09 crc kubenswrapper[4912]: I1208 21:41:09.025034 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"]
Dec 08 21:41:09 crc kubenswrapper[4912]: I1208 21:41:09.025409 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-rhsc4" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" containerID="cri-o://391dec488e0a2457e4b3f944424b62a2f7974216b2b1d4af655770d78cb3c958" gracePeriod=10
Dec 08 21:41:09 crc kubenswrapper[4912]: I1208 21:41:09.055167 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-rhsc4" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused"
Dec 08 21:41:12 crc kubenswrapper[4912]: I1208 21:41:12.459357 4912 generic.go:334] "Generic (PLEG): container finished" podID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerID="391dec488e0a2457e4b3f944424b62a2f7974216b2b1d4af655770d78cb3c958" exitCode=0
Dec 08 21:41:12 crc kubenswrapper[4912]: I1208 21:41:12.459441 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-rhsc4" event={"ID":"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb","Type":"ContainerDied","Data":"391dec488e0a2457e4b3f944424b62a2f7974216b2b1d4af655770d78cb3c958"}
Dec 08 21:41:14 crc kubenswrapper[4912]: I1208 21:41:14.054840 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-rhsc4" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused"
Dec 08 21:41:18 crc kubenswrapper[4912]: E1208 21:41:18.850470 4912 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified"
Dec 08 21:41:18 crc kubenswrapper[4912]: E1208 21:41:18.851166 4912 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8qpqm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-djggl_openstack(06e69c2b-f54a-466a-9f5c-60499b4f5123): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 08 21:41:18 crc kubenswrapper[4912]: E1208 21:41:18.853078 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-djggl" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.055308 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7qx69"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.089791 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.089912 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.089976 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.089997 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.090061 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.090102 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n98h4\" (UniqueName: \"kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4\") pod \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\" (UID: \"8a585189-dc76-4a0c-9b77-f5abbeb2d88b\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.095587 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4" (OuterVolumeSpecName: "kube-api-access-n98h4") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "kube-api-access-n98h4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.095605 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.106503 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts" (OuterVolumeSpecName: "scripts") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.114337 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.165795 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.181390 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-rhsc4"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.186173 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data" (OuterVolumeSpecName: "config-data") pod "8a585189-dc76-4a0c-9b77-f5abbeb2d88b" (UID: "8a585189-dc76-4a0c-9b77-f5abbeb2d88b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193347 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193380 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193393 4912 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193405 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n98h4\" (UniqueName: \"kubernetes.io/projected/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-kube-api-access-n98h4\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193416 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.193425 4912 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8a585189-dc76-4a0c-9b77-f5abbeb2d88b-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.245283 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.294723 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.294862 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc\") pod \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.294905 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.294954 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config\") pod \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.295335 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.295379 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.295405 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldzq4\" (UniqueName: \"kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.295622 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs" (OuterVolumeSpecName: "logs") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.295976 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296012 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb\") pod \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296068 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb\") pod \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296090 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv4xf\" (UniqueName: \"kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf\") pod \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\" (UID: \"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296123 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296151 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle\") pod \"40102b0f-259c-4423-8934-6ab1397f2aa6\" (UID: \"40102b0f-259c-4423-8934-6ab1397f2aa6\") "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296608 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.296644 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40102b0f-259c-4423-8934-6ab1397f2aa6-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.299456 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4" (OuterVolumeSpecName: "kube-api-access-ldzq4") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "kube-api-access-ldzq4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.304866 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts" (OuterVolumeSpecName: "scripts") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.304935 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf" (OuterVolumeSpecName: "kube-api-access-fv4xf") pod "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" (UID: "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb"). InnerVolumeSpecName "kube-api-access-fv4xf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.318554 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd" (OuterVolumeSpecName: "glance") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.358157 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config" (OuterVolumeSpecName: "config") pod "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" (UID: "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.358179 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" (UID: "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.360580 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" (UID: "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.365204 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data" (OuterVolumeSpecName: "config-data") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.365715 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40102b0f-259c-4423-8934-6ab1397f2aa6" (UID: "40102b0f-259c-4423-8934-6ab1397f2aa6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.380592 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" (UID: "c09d3deb-52f6-4cdd-9df5-1c4898cae7cb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398425 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398461 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398470 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldzq4\" (UniqueName: \"kubernetes.io/projected/40102b0f-259c-4423-8934-6ab1397f2aa6-kube-api-access-ldzq4\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398507 4912 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") on node \"crc\" "
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398519 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398534 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398542 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv4xf\" (UniqueName: \"kubernetes.io/projected/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb-kube-api-access-fv4xf\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398554 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398562 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.398569 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40102b0f-259c-4423-8934-6ab1397f2aa6-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.417957 4912 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.418108 4912 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd") on node "crc"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.501664 4912 reconciler_common.go:293] "Volume detached for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.526743 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7hldc" event={"ID":"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a","Type":"ContainerStarted","Data":"e69a7d63d022206f98327b1e8ce0137065f501e6071416f527c30f5f1473d807"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.530212 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-krnh9" event={"ID":"29694038-be1b-4d16-95ce-16c516b0f8bf","Type":"ContainerStarted","Data":"cdaf6e741d17ca0448781b733bfa0eab46c78ca7a352c65340b22f2d14ddb87b"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.539637 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerStarted","Data":"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.539687 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-log" containerID="cri-o://0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" gracePeriod=30
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.539807 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-httpd" containerID="cri-o://f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" gracePeriod=30
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.546758 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-rhsc4" event={"ID":"c09d3deb-52f6-4cdd-9df5-1c4898cae7cb","Type":"ContainerDied","Data":"7faa65e01b614b276983caf5ffce13cef336779798921ed0938f63e14694df74"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.546910 4912 scope.go:117] "RemoveContainer" containerID="391dec488e0a2457e4b3f944424b62a2f7974216b2b1d4af655770d78cb3c958"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.547106 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-rhsc4"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.552305 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7qx69" event={"ID":"8a585189-dc76-4a0c-9b77-f5abbeb2d88b","Type":"ContainerDied","Data":"d1d927984219fb611d9a93650b429b7f50c9fb52ab20bfb1549db7ebb51cded3"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.552340 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1d927984219fb611d9a93650b429b7f50c9fb52ab20bfb1549db7ebb51cded3"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.552403 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7qx69"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.553914 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-7hldc" podStartSLOduration=2.649542327 podStartE2EDuration="24.553890905s" podCreationTimestamp="2025-12-08 21:40:55 +0000 UTC" firstStartedPulling="2025-12-08 21:40:56.936843841 +0000 UTC m=+1338.799845924" lastFinishedPulling="2025-12-08 21:41:18.841192419 +0000 UTC m=+1360.704194502" observedRunningTime="2025-12-08 21:41:19.546078593 +0000 UTC m=+1361.409080676" watchObservedRunningTime="2025-12-08 21:41:19.553890905 +0000 UTC m=+1361.416892988"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.563639 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"40102b0f-259c-4423-8934-6ab1397f2aa6","Type":"ContainerDied","Data":"84971522fe57d673aaf8078e6bbe4cc582ccf9ddfb341835cee13de37b32e74c"}
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.563677 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.587944 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-djggl" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.588155 4912 scope.go:117] "RemoveContainer" containerID="8d1ab3da369d33d9f72100655124f91c9c550d6361224eaf60cbdc9fb7f64537"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.589531 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=20.589510754 podStartE2EDuration="20.589510754s" podCreationTimestamp="2025-12-08 21:40:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:19.569411347 +0000 UTC m=+1361.432413430" watchObservedRunningTime="2025-12-08 21:41:19.589510754 +0000 UTC m=+1361.452512837"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.595155 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-krnh9" podStartSLOduration=2.6605832080000003 podStartE2EDuration="24.595138917s" podCreationTimestamp="2025-12-08 21:40:55 +0000 UTC" firstStartedPulling="2025-12-08 21:40:56.92025204 +0000 UTC m=+1338.783254123" lastFinishedPulling="2025-12-08 21:41:18.854807749 +0000 UTC m=+1360.717809832" observedRunningTime="2025-12-08 21:41:19.591870528 +0000 UTC m=+1361.454872601" watchObservedRunningTime="2025-12-08 21:41:19.595138917 +0000 UTC m=+1361.458141000"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.699682 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.710638 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.722175 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"]
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.732380 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-rhsc4"]
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.732745 4912 scope.go:117] "RemoveContainer" containerID="b0e0baed26f16b97f0843acce67bcbba0a6e74b12ace6197dba6c1189c5d159e"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.743785 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744524 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-httpd"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744548 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-httpd"
Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744568 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a585189-dc76-4a0c-9b77-f5abbeb2d88b" containerName="keystone-bootstrap"
Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744577 4912 state_mem.go:107] "Deleted CPUSet assignment"
podUID="8a585189-dc76-4a0c-9b77-f5abbeb2d88b" containerName="keystone-bootstrap" Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744612 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744619 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744632 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="init" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744641 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="init" Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744650 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-log" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744657 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-log" Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744675 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="init" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744682 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="init" Dec 08 21:41:19 crc kubenswrapper[4912]: E1208 21:41:19.744694 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744701 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744937 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="02f813e2-0749-4dee-b272-b21512eef31a" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744955 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a585189-dc76-4a0c-9b77-f5abbeb2d88b" containerName="keystone-bootstrap" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744964 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744983 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-log" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.744994 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" containerName="glance-httpd" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.746293 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.751414 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.751662 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.765772 4912 scope.go:117] "RemoveContainer" containerID="a1a12b4dd4803939f9ceb06f2e0379f7df229dddc20cf33b4f05b7f2807644de" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.771537 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.908883 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.908954 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.908995 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.909025 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.909083 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.909122 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.909177 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:19 crc kubenswrapper[4912]: I1208 21:41:19.909231 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bmb5\" (UniqueName: \"kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011063 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011113 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011152 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011185 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011231 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011264 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bmb5\" (UniqueName: \"kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011327 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.011349 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.012903 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.012966 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.017472 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.018425 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.018484 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f170b0bc0d5e657f5d6976432df9f5b93559f3f6e739297a830b8d1908e7cee/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.021876 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.027294 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.027349 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.038511 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bmb5\" (UniqueName: \"kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " 
pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.056872 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.074126 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.200642 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.255081 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7qx69"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.265678 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7qx69"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316298 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316483 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316517 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316686 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316755 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q24rz\" (UniqueName: \"kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316793 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts\") pod \"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.316817 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run\") pod 
\"be88b255-f61e-4cf2-a795-14aecb94f665\" (UID: \"be88b255-f61e-4cf2-a795-14aecb94f665\") " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.317630 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.320589 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs" (OuterVolumeSpecName: "logs") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.326872 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz" (OuterVolumeSpecName: "kube-api-access-q24rz") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "kube-api-access-q24rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.330632 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts" (OuterVolumeSpecName: "scripts") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.351589 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (OuterVolumeSpecName: "glance") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "pvc-f73b914b-b926-44ad-a1ae-1553428892b0". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.361219 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dpz7w"] Dec 08 21:41:20 crc kubenswrapper[4912]: E1208 21:41:20.361730 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-httpd" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.361743 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-httpd" Dec 08 21:41:20 crc kubenswrapper[4912]: E1208 21:41:20.361794 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-log" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.361802 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-log" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.362071 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-log" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.362090 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" containerName="glance-httpd" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.362772 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.365625 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.365730 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.369208 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.366010 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.366084 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.367209 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-c727c" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.372169 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dpz7w"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.394710 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data" (OuterVolumeSpecName: "config-data") pod "be88b255-f61e-4cf2-a795-14aecb94f665" (UID: "be88b255-f61e-4cf2-a795-14aecb94f665"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419428 4912 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" " Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419472 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q24rz\" (UniqueName: \"kubernetes.io/projected/be88b255-f61e-4cf2-a795-14aecb94f665-kube-api-access-q24rz\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419487 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419500 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419514 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be88b255-f61e-4cf2-a795-14aecb94f665-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419526 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.419540 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88b255-f61e-4cf2-a795-14aecb94f665-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.438886 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40102b0f-259c-4423-8934-6ab1397f2aa6" path="/var/lib/kubelet/pods/40102b0f-259c-4423-8934-6ab1397f2aa6/volumes" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.440019 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a585189-dc76-4a0c-9b77-f5abbeb2d88b" path="/var/lib/kubelet/pods/8a585189-dc76-4a0c-9b77-f5abbeb2d88b/volumes" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.440751 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" path="/var/lib/kubelet/pods/c09d3deb-52f6-4cdd-9df5-1c4898cae7cb/volumes" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.441859 4912 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.442099 4912 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0") on node "crc" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.521673 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.521742 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.521930 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtlr9\" (UniqueName: \"kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.522176 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.522212 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.522261 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.522483 4912 reconciler_common.go:293] "Volume detached for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580706 4912 generic.go:334] "Generic (PLEG): container finished" podID="be88b255-f61e-4cf2-a795-14aecb94f665" containerID="f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" exitCode=143 Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580750 4912 generic.go:334] "Generic (PLEG): container finished" podID="be88b255-f61e-4cf2-a795-14aecb94f665" containerID="0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" exitCode=143 Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 
21:41:20.580806 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerDied","Data":"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e"} Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580838 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580867 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerDied","Data":"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c"} Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580880 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be88b255-f61e-4cf2-a795-14aecb94f665","Type":"ContainerDied","Data":"2a75af7c0d4ed1f7b80180cb3294b9d2b078f5498f48dc5b3b3b0808fb3f762b"} Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.580896 4912 scope.go:117] "RemoveContainer" containerID="f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.623413 4912 scope.go:117] "RemoveContainer" containerID="0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627138 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627289 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627399 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627471 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627511 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.627684 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtlr9\" (UniqueName: \"kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 
21:41:20.627711 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.636075 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.636430 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.637387 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.637959 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.638137 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.641532 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.659551 4912 scope.go:117] "RemoveContainer" containerID="f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" Dec 08 21:41:20 crc kubenswrapper[4912]: E1208 21:41:20.681275 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e\": container with ID starting with f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e not found: ID does not exist" containerID="f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.681327 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e"} err="failed to get container status \"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e\": rpc error: code = NotFound desc = could not find container \"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e\": container with ID starting with f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e not found: ID does not 
exist" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.681389 4912 scope.go:117] "RemoveContainer" containerID="0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" Dec 08 21:41:20 crc kubenswrapper[4912]: E1208 21:41:20.681921 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c\": container with ID starting with 0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c not found: ID does not exist" containerID="0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.681980 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c"} err="failed to get container status \"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c\": rpc error: code = NotFound desc = could not find container \"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c\": container with ID starting with 0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c not found: ID does not exist" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.682017 4912 scope.go:117] "RemoveContainer" containerID="f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.682257 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.682565 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e"} err="failed to get container status \"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e\": rpc error: code = NotFound desc = could not find container \"f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e\": container with ID starting with f9a88b8d46b2b1fbf62a2a88abf4f71bab061a1cd3194dae1e41da4f00e8153e not found: ID does not exist" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.682611 4912 scope.go:117] "RemoveContainer" containerID="0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.683747 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c"} err="failed to get container status \"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c\": rpc error: code = NotFound desc = could not find container \"0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c\": container with ID starting with 0fba47dffe80f675839d066c270b9f89c867c32a91c94ae1bd73915267db883c not found: ID does not exist" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.686835 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.689641 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.690262 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.693917 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtlr9\" (UniqueName: \"kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9\") pod \"keystone-bootstrap-dpz7w\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") " pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.699796 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dpz7w" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.727604 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:41:20 crc kubenswrapper[4912]: W1208 21:41:20.731772 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfea36e4_18b5_4e25_a169_f91e8058ee69.slice/crio-c2bdfe2ea8fec08547a925cb4d6fd017b8598823c2d7d430be6d6ea6f3e6b37e WatchSource:0}: Error finding container c2bdfe2ea8fec08547a925cb4d6fd017b8598823c2d7d430be6d6ea6f3e6b37e: Status 404 returned error can't find the container with id c2bdfe2ea8fec08547a925cb4d6fd017b8598823c2d7d430be6d6ea6f3e6b37e Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.741785 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.842892 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.842951 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.842977 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.843003 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc 
kubenswrapper[4912]: I1208 21:41:20.843061 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.843107 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jgxg\" (UniqueName: \"kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.843301 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.843645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.945838 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.946333 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.946367 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.946420 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.946452 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0" Dec 08 21:41:20 crc kubenswrapper[4912]: 
I1208 21:41:20.946497 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.947211 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.947288 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jgxg\" (UniqueName: \"kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.949091 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.949698 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.952668 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.952695 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.952784 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f04dae7735d5049d0d88a291850a74e98319e9b887dd9c8e9e7d7d4d3762e2c6/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.953392 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.955316 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.956376 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.965209 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jgxg\" (UniqueName: \"kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:20 crc kubenswrapper[4912]: I1208 21:41:20.996349 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.025625 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.229456 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dpz7w"]
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.594773 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dpz7w" event={"ID":"47140ad7-c2c2-4c61-97da-17341fccc09b","Type":"ContainerStarted","Data":"6129e6851894b9dce578cfad956a60d0eae3bea487a2fe3f1ea364442ff2ea21"}
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.595223 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dpz7w" event={"ID":"47140ad7-c2c2-4c61-97da-17341fccc09b","Type":"ContainerStarted","Data":"0cde3c52b443664f0dc9035a1531e5cf4e708e104ee213be73b1a62a0143cbff"}
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.598664 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerStarted","Data":"4e04b31cf9796cf0e96c2c4f2db773b81af3dc958ecd94485556a6f4b88dbd0c"}
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.598743 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerStarted","Data":"c2bdfe2ea8fec08547a925cb4d6fd017b8598823c2d7d430be6d6ea6f3e6b37e"}
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.630701 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:41:21 crc kubenswrapper[4912]: I1208 21:41:21.632634 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dpz7w" podStartSLOduration=1.632623448 podStartE2EDuration="1.632623448s" podCreationTimestamp="2025-12-08 21:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:21.622232151 +0000 UTC m=+1363.485234244" watchObservedRunningTime="2025-12-08 21:41:21.632623448 +0000 UTC m=+1363.495625531"
Dec 08 21:41:21 crc kubenswrapper[4912]: W1208 21:41:21.633384 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod931060bd_5dcf_4163_9da8_aed7374af480.slice/crio-08b73f72942b6f6d81459a94414b4c8c57a3af4a7025cb6752a7af9be7e0f4ec WatchSource:0}: Error finding container 08b73f72942b6f6d81459a94414b4c8c57a3af4a7025cb6752a7af9be7e0f4ec: Status 404 returned error can't find the container with id 08b73f72942b6f6d81459a94414b4c8c57a3af4a7025cb6752a7af9be7e0f4ec
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.439365 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be88b255-f61e-4cf2-a795-14aecb94f665" path="/var/lib/kubelet/pods/be88b255-f61e-4cf2-a795-14aecb94f665/volumes"
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.621761 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerStarted","Data":"51d517c990744464e25749722ed2c757dfeef6fe3ac767932698e077889a82c9"}
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.625926 4912 generic.go:334] "Generic (PLEG): container finished" podID="c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" containerID="e69a7d63d022206f98327b1e8ce0137065f501e6071416f527c30f5f1473d807" exitCode=0
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.626002 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7hldc" event={"ID":"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a","Type":"ContainerDied","Data":"e69a7d63d022206f98327b1e8ce0137065f501e6071416f527c30f5f1473d807"}
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.630946 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerStarted","Data":"0a36acd2d6a74c1622042414d9487cf2bcb46fab5b6db7fdad9cf6670fea0fd0"}
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.630993 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerStarted","Data":"08b73f72942b6f6d81459a94414b4c8c57a3af4a7025cb6752a7af9be7e0f4ec"}
Dec 08 21:41:22 crc kubenswrapper[4912]: I1208 21:41:22.655580 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.655557027 podStartE2EDuration="3.655557027s" podCreationTimestamp="2025-12-08 21:41:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:22.64750357 +0000 UTC m=+1364.510505653" watchObservedRunningTime="2025-12-08 21:41:22.655557027 +0000 UTC m=+1364.518559110"
Dec 08 21:41:23 crc kubenswrapper[4912]: I1208 21:41:23.639745 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerStarted","Data":"598783c498f4bc2640e24c42329e86797a5374c25e552dcf78371d0a1d067a6e"}
Dec 08 21:41:23 crc kubenswrapper[4912]: I1208 21:41:23.676241 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.676214977 podStartE2EDuration="3.676214977s" podCreationTimestamp="2025-12-08 21:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:23.661808987 +0000 UTC m=+1365.524811090" watchObservedRunningTime="2025-12-08 21:41:23.676214977 +0000 UTC m=+1365.539217060"
Dec 08 21:41:23 crc kubenswrapper[4912]: I1208 21:41:23.964251 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7hldc"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.054350 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-rhsc4" podUID="c09d3deb-52f6-4cdd-9df5-1c4898cae7cb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.108808 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts\") pod \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") "
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.108957 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle\") pod \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") "
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.109007 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcmp7\" (UniqueName: \"kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7\") pod \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") "
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.109159 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs\") pod \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") "
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.109179 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data\") pod \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\" (UID: \"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a\") "
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.110261 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs" (OuterVolumeSpecName: "logs") pod "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" (UID: "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.115063 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts" (OuterVolumeSpecName: "scripts") pod "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" (UID: "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.118150 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7" (OuterVolumeSpecName: "kube-api-access-zcmp7") pod "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" (UID: "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a"). InnerVolumeSpecName "kube-api-access-zcmp7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.138108 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" (UID: "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.138476 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data" (OuterVolumeSpecName: "config-data") pod "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" (UID: "c0aefa1f-6b89-4aa1-b25e-3f3275ed571a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.211884 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.211932 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcmp7\" (UniqueName: \"kubernetes.io/projected/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-kube-api-access-zcmp7\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.211948 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.211960 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.211970 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.650462 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7hldc" event={"ID":"c0aefa1f-6b89-4aa1-b25e-3f3275ed571a","Type":"ContainerDied","Data":"4c69dbe1999e3500b7a759fd8ea11e893ac165ec23dd6472ecebeedbbd883f2f"}
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.650514 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7hldc"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.650527 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c69dbe1999e3500b7a759fd8ea11e893ac165ec23dd6472ecebeedbbd883f2f"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.755097 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-868f84fc6-f7svl"]
Dec 08 21:41:24 crc kubenswrapper[4912]: E1208 21:41:24.757755 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" containerName="placement-db-sync"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.757794 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" containerName="placement-db-sync"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.758029 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" containerName="placement-db-sync"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.759826 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.763182 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.763580 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.764493 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zv9zp"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.764676 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.764988 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.772449 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-868f84fc6-f7svl"]
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925096 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-logs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925395 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-scripts\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925449 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-combined-ca-bundle\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925737 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-public-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925774 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5874l\" (UniqueName: \"kubernetes.io/projected/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-kube-api-access-5874l\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925812 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-internal-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:24 crc kubenswrapper[4912]: I1208 21:41:24.925835 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-config-data\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027463 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-scripts\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027515 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-combined-ca-bundle\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027592 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-public-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027800 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5874l\" (UniqueName: \"kubernetes.io/projected/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-kube-api-access-5874l\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027905 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-internal-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.027946 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-config-data\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.028071 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-logs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.028488 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-logs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.032912 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-public-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.033077 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-scripts\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.033178 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-config-data\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.033422 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-combined-ca-bundle\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.034132 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-internal-tls-certs\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.058139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5874l\" (UniqueName: \"kubernetes.io/projected/826aae34-f2b3-4cf5-8d59-04a1ba33a2b5-kube-api-access-5874l\") pod \"placement-868f84fc6-f7svl\" (UID: \"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5\") " pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.079305 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.581855 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-868f84fc6-f7svl"]
Dec 08 21:41:25 crc kubenswrapper[4912]: W1208 21:41:25.581943 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod826aae34_f2b3_4cf5_8d59_04a1ba33a2b5.slice/crio-6eef8a26bddc4c5954ece70268129389392c59240b99ea940df6ca3f04ec88e1 WatchSource:0}: Error finding container 6eef8a26bddc4c5954ece70268129389392c59240b99ea940df6ca3f04ec88e1: Status 404 returned error can't find the container with id 6eef8a26bddc4c5954ece70268129389392c59240b99ea940df6ca3f04ec88e1
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.665200 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-868f84fc6-f7svl" event={"ID":"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5","Type":"ContainerStarted","Data":"6eef8a26bddc4c5954ece70268129389392c59240b99ea940df6ca3f04ec88e1"}
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.672009 4912 generic.go:334] "Generic (PLEG): container finished" podID="29694038-be1b-4d16-95ce-16c516b0f8bf" containerID="cdaf6e741d17ca0448781b733bfa0eab46c78ca7a352c65340b22f2d14ddb87b" exitCode=0
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.672121 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-krnh9" event={"ID":"29694038-be1b-4d16-95ce-16c516b0f8bf","Type":"ContainerDied","Data":"cdaf6e741d17ca0448781b733bfa0eab46c78ca7a352c65340b22f2d14ddb87b"}
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.676207 4912 generic.go:334] "Generic (PLEG): container finished" podID="47140ad7-c2c2-4c61-97da-17341fccc09b" containerID="6129e6851894b9dce578cfad956a60d0eae3bea487a2fe3f1ea364442ff2ea21" exitCode=0
Dec 08 21:41:25 crc kubenswrapper[4912]: I1208 21:41:25.676250 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dpz7w" event={"ID":"47140ad7-c2c2-4c61-97da-17341fccc09b","Type":"ContainerDied","Data":"6129e6851894b9dce578cfad956a60d0eae3bea487a2fe3f1ea364442ff2ea21"}
Dec 08 21:41:26 crc kubenswrapper[4912]: I1208 21:41:26.694428 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-868f84fc6-f7svl" event={"ID":"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5","Type":"ContainerStarted","Data":"32e0942a241fa5b38750131f1afe0240d1cd8a9ef3e9e06598e1aadc27245b1b"}
Dec 08 21:41:26 crc kubenswrapper[4912]: I1208 21:41:26.695088 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-868f84fc6-f7svl" event={"ID":"826aae34-f2b3-4cf5-8d59-04a1ba33a2b5","Type":"ContainerStarted","Data":"acf654c87537de60f6465fe5f58b0d59aad25ff87365a0cdd0a56e0027ca52a7"}
Dec 08 21:41:26 crc kubenswrapper[4912]: I1208 21:41:26.695150 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:26 crc kubenswrapper[4912]: I1208 21:41:26.695173 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-868f84fc6-f7svl"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.166240 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-krnh9"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.173378 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dpz7w"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.189155 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-868f84fc6-f7svl" podStartSLOduration=3.189136467 podStartE2EDuration="3.189136467s" podCreationTimestamp="2025-12-08 21:41:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:26.728788255 +0000 UTC m=+1368.591790338" watchObservedRunningTime="2025-12-08 21:41:27.189136467 +0000 UTC m=+1369.052138550"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.276707 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqrl7\" (UniqueName: \"kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7\") pod \"29694038-be1b-4d16-95ce-16c516b0f8bf\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.276867 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data\") pod \"29694038-be1b-4d16-95ce-16c516b0f8bf\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.276935 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.276973 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.277021 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtlr9\" (UniqueName: \"kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.277070 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle\") pod \"29694038-be1b-4d16-95ce-16c516b0f8bf\" (UID: \"29694038-be1b-4d16-95ce-16c516b0f8bf\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.277101 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.277179 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.277234 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys\") pod \"47140ad7-c2c2-4c61-97da-17341fccc09b\" (UID: \"47140ad7-c2c2-4c61-97da-17341fccc09b\") "
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.282707 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7" (OuterVolumeSpecName: "kube-api-access-lqrl7") pod "29694038-be1b-4d16-95ce-16c516b0f8bf" (UID: "29694038-be1b-4d16-95ce-16c516b0f8bf"). InnerVolumeSpecName "kube-api-access-lqrl7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.282785 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.283375 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts" (OuterVolumeSpecName: "scripts") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.283884 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9" (OuterVolumeSpecName: "kube-api-access-xtlr9") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "kube-api-access-xtlr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.284288 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.296250 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "29694038-be1b-4d16-95ce-16c516b0f8bf" (UID: "29694038-be1b-4d16-95ce-16c516b0f8bf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.303579 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data" (OuterVolumeSpecName: "config-data") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.305404 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29694038-be1b-4d16-95ce-16c516b0f8bf" (UID: "29694038-be1b-4d16-95ce-16c516b0f8bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.308371 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47140ad7-c2c2-4c61-97da-17341fccc09b" (UID: "47140ad7-c2c2-4c61-97da-17341fccc09b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380219 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380625 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380661 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380679 4912 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380695 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqrl7\" (UniqueName: \"kubernetes.io/projected/29694038-be1b-4d16-95ce-16c516b0f8bf-kube-api-access-lqrl7\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380715 4912 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29694038-be1b-4d16-95ce-16c516b0f8bf-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380727 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380738 4912 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47140ad7-c2c2-4c61-97da-17341fccc09b-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.380753 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtlr9\" (UniqueName: \"kubernetes.io/projected/47140ad7-c2c2-4c61-97da-17341fccc09b-kube-api-access-xtlr9\") on node \"crc\" DevicePath \"\""
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.707863 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-krnh9" event={"ID":"29694038-be1b-4d16-95ce-16c516b0f8bf","Type":"ContainerDied","Data":"f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde"}
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.707928 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f15ef95949a4b35ed00e708901029a5b9c15521ed9807a2d240fbc6b5bb84fde"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.707886 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-krnh9"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.712580 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dpz7w"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.712575 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dpz7w" event={"ID":"47140ad7-c2c2-4c61-97da-17341fccc09b","Type":"ContainerDied","Data":"0cde3c52b443664f0dc9035a1531e5cf4e708e104ee213be73b1a62a0143cbff"}
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.712719 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cde3c52b443664f0dc9035a1531e5cf4e708e104ee213be73b1a62a0143cbff"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.874817 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-d797fb44f-6g7nm"]
Dec 08 21:41:27 crc kubenswrapper[4912]: E1208 21:41:27.875444 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47140ad7-c2c2-4c61-97da-17341fccc09b" containerName="keystone-bootstrap"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.875534 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="47140ad7-c2c2-4c61-97da-17341fccc09b" containerName="keystone-bootstrap"
Dec 08 21:41:27 crc kubenswrapper[4912]: E1208 21:41:27.875614 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29694038-be1b-4d16-95ce-16c516b0f8bf" containerName="barbican-db-sync"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.875716 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="29694038-be1b-4d16-95ce-16c516b0f8bf" containerName="barbican-db-sync"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.876022 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="29694038-be1b-4d16-95ce-16c516b0f8bf" containerName="barbican-db-sync"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.876137 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="47140ad7-c2c2-4c61-97da-17341fccc09b" containerName="keystone-bootstrap"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.877000 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.881302 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.881675 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.883544 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.883889 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.884302 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.893674 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-c727c"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.894719 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-d797fb44f-6g7nm"]
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.990368 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-67f646cf6d-bpxc7"]
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.992270 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:27 crc kubenswrapper[4912]: I1208 21:41:27.999817 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.000301 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.000546 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-txdnh"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001018 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twl45\" (UniqueName: \"kubernetes.io/projected/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-kube-api-access-twl45\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001105 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-credential-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001145 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-internal-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001259 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-fernet-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001297 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-scripts\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001467 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-config-data\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001551 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-public-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.001600 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-combined-ca-bundle\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.032771 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-67f588f949-sxst6"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.036084 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.040294 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.058648 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67f646cf6d-bpxc7"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.071976 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-67f588f949-sxst6"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106381 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data-custom\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106438 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-combined-ca-bundle\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106475 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twl45\" (UniqueName: \"kubernetes.io/projected/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-kube-api-access-twl45\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106504 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-credential-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106532 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-internal-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106564 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106587 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4697809d-6d20-495e-a2bb-9b39edb2a09c-logs\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106610 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbpt9\" (UniqueName: \"kubernetes.io/projected/4697809d-6d20-495e-a2bb-9b39edb2a09c-kube-api-access-vbpt9\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106658 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-fernet-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106679 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-scripts\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106712 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-config-data\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106733 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-public-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.106751 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-combined-ca-bundle\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.120804 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-public-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.120911 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-scripts\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.122140 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-credential-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.126820 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-combined-ca-bundle\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.128879 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-fernet-keys\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.129821 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-internal-tls-certs\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.145349 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-config-data\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.150328 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twl45\" (UniqueName: \"kubernetes.io/projected/de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b-kube-api-access-twl45\") pod \"keystone-d797fb44f-6g7nm\" (UID: \"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b\") " pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.167096 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.176383 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.186983 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.208636 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.208956 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data-custom\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209079 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-combined-ca-bundle\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209175 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b0929ad-736b-47b7-8868-99450e21af32-logs\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209264 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk8tt\" (UniqueName: \"kubernetes.io/projected/7b0929ad-736b-47b7-8868-99450e21af32-kube-api-access-bk8tt\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209371 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209470 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4697809d-6d20-495e-a2bb-9b39edb2a09c-logs\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209564 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-combined-ca-bundle\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209657 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbpt9\" (UniqueName: \"kubernetes.io/projected/4697809d-6d20-495e-a2bb-9b39edb2a09c-kube-api-access-vbpt9\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.209759 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data-custom\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.216908 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data-custom\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.217479 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4697809d-6d20-495e-a2bb-9b39edb2a09c-logs\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.217896 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-d797fb44f-6g7nm"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.218214 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-combined-ca-bundle\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.218989 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4697809d-6d20-495e-a2bb-9b39edb2a09c-config-data\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.248409 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbpt9\" (UniqueName: \"kubernetes.io/projected/4697809d-6d20-495e-a2bb-9b39edb2a09c-kube-api-access-vbpt9\") pod \"barbican-keystone-listener-67f646cf6d-bpxc7\" (UID: \"4697809d-6d20-495e-a2bb-9b39edb2a09c\") " pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.274630 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.276538 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-787c747fb6-jxmjq"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.278761 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.305728 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"]
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311375 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311439 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311510 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b0929ad-736b-47b7-8868-99450e21af32-logs\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311534 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311569 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk8tt\" (UniqueName: \"kubernetes.io/projected/7b0929ad-736b-47b7-8868-99450e21af32-kube-api-access-bk8tt\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311588 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdhdp\" (UniqueName: \"kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311615 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp"
Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311636 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: 
\"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311660 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-combined-ca-bundle\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311692 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data-custom\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.311711 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.313198 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b0929ad-736b-47b7-8868-99450e21af32-logs\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.317771 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.318916 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-combined-ca-bundle\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.336198 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b0929ad-736b-47b7-8868-99450e21af32-config-data-custom\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.337164 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.344616 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk8tt\" (UniqueName: \"kubernetes.io/projected/7b0929ad-736b-47b7-8868-99450e21af32-kube-api-access-bk8tt\") pod \"barbican-worker-67f588f949-sxst6\" (UID: \"7b0929ad-736b-47b7-8868-99450e21af32\") " pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.372287 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-67f588f949-sxst6" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413023 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413145 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdhdp\" (UniqueName: \"kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413181 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413198 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413234 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413268 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413295 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413324 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413354 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: 
\"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413383 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.413471 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlgr8\" (UniqueName: \"kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.415109 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.415746 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.416874 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.416960 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.417445 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.441709 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdhdp\" (UniqueName: \"kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp\") pod \"dnsmasq-dns-586bdc5f9-mczwp\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.516906 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " 
pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.517068 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.517115 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.517210 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.517392 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlgr8\" (UniqueName: \"kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.519100 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.521565 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.523740 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.524502 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.535149 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlgr8\" (UniqueName: \"kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8\") pod \"barbican-api-787c747fb6-jxmjq\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: 
I1208 21:41:28.662305 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.672831 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.803194 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-d797fb44f-6g7nm"] Dec 08 21:41:28 crc kubenswrapper[4912]: I1208 21:41:28.976582 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67f646cf6d-bpxc7"] Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.004719 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.096021 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-67f588f949-sxst6"] Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.217134 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"] Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.365853 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"] Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.740090 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67f588f949-sxst6" event={"ID":"7b0929ad-736b-47b7-8868-99450e21af32","Type":"ContainerStarted","Data":"e40ac1e228f7915b9e6c5d6fb8d221ae1ee92bc709222cbd1b03f84c85bdc293"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.741206 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerStarted","Data":"a57cf77cc1b3d0965967577d4f265f825ec16280440fe1641aa4565dd283842a"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.741253 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerStarted","Data":"b98915b2ac01025d19dc425894d79c183ea00595f1a0dba7775f352f425670e0"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.742793 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-d797fb44f-6g7nm" event={"ID":"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b","Type":"ContainerStarted","Data":"3da2e7c3258adac381ceb1ebab6d506aca7aca5edccdc5599c4a45cf59c7f01a"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.742818 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-d797fb44f-6g7nm" event={"ID":"de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b","Type":"ContainerStarted","Data":"2943693a551a390d0834ede023759d3e9e5af992a2ce11d8d9755b106c5f0e56"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.743898 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-d797fb44f-6g7nm" Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.745527 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7" event={"ID":"4697809d-6d20-495e-a2bb-9b39edb2a09c","Type":"ContainerStarted","Data":"5435bf2699c18899016395c8fee9efd95ef64ad9e848ba89841401cf7513565e"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.747934 4912 generic.go:334] "Generic (PLEG): container finished" 
podID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerID="10e00959b53f9ab07b2b7e2a1131fc9d8ce67327c89c4e281ce9192e3ddc2ac1" exitCode=0 Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.747965 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" event={"ID":"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c","Type":"ContainerDied","Data":"10e00959b53f9ab07b2b7e2a1131fc9d8ce67327c89c4e281ce9192e3ddc2ac1"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.747984 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" event={"ID":"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c","Type":"ContainerStarted","Data":"fb3ccccf010e97cf30d93f6d6eb21daee7eba4b427786c5b674dc2e8dada22a1"} Dec 08 21:41:29 crc kubenswrapper[4912]: I1208 21:41:29.766429 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-d797fb44f-6g7nm" podStartSLOduration=2.766413159 podStartE2EDuration="2.766413159s" podCreationTimestamp="2025-12-08 21:41:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:29.763641438 +0000 UTC m=+1371.626643521" watchObservedRunningTime="2025-12-08 21:41:29.766413159 +0000 UTC m=+1371.629415242" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.074534 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.074572 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.101839 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.111573 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.763789 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerStarted","Data":"6826d06f22c158b03c6b62a8f2a97416e4e959205c555ed5c96c7768d1ea5787"} Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.764237 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.764267 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.770358 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" event={"ID":"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c","Type":"ContainerStarted","Data":"79e797e31d3691b81ba614ebfeeec6588bdf7dcb7d8a809d329368c5b30d2db0"} Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.771260 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.772017 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.772060 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-internal-api-0" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.834777 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-787c747fb6-jxmjq" podStartSLOduration=2.834752774 podStartE2EDuration="2.834752774s" podCreationTimestamp="2025-12-08 21:41:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:30.802419304 +0000 UTC m=+1372.665421397" watchObservedRunningTime="2025-12-08 21:41:30.834752774 +0000 UTC m=+1372.697754857" Dec 08 21:41:30 crc kubenswrapper[4912]: I1208 21:41:30.844507 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" podStartSLOduration=2.844492854 podStartE2EDuration="2.844492854s" podCreationTimestamp="2025-12-08 21:41:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:30.842708188 +0000 UTC m=+1372.705710271" watchObservedRunningTime="2025-12-08 21:41:30.844492854 +0000 UTC m=+1372.707494937" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.026535 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.026587 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.064013 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.103453 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.527379 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-696dcdf5fd-4l78s"] Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.530948 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.534940 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-696dcdf5fd-4l78s"] Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.539615 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.539807 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.717875 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-public-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.717939 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.717972 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-combined-ca-bundle\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.717999 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f7341d1-a217-4082-9610-1f882c55186d-logs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.722292 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data-custom\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.722597 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj59b\" (UniqueName: \"kubernetes.io/projected/1f7341d1-a217-4082-9610-1f882c55186d-kube-api-access-cj59b\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.722621 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-internal-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.783031 4912 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.783084 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824492 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-combined-ca-bundle\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824572 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f7341d1-a217-4082-9610-1f882c55186d-logs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824632 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data-custom\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824714 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj59b\" (UniqueName: \"kubernetes.io/projected/1f7341d1-a217-4082-9610-1f882c55186d-kube-api-access-cj59b\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824737 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-internal-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824893 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-public-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.824937 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.825646 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f7341d1-a217-4082-9610-1f882c55186d-logs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.836182 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-public-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.836331 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data-custom\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.836872 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-internal-tls-certs\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.839954 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-combined-ca-bundle\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.841367 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f7341d1-a217-4082-9610-1f882c55186d-config-data\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.851898 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj59b\" (UniqueName: \"kubernetes.io/projected/1f7341d1-a217-4082-9610-1f882c55186d-kube-api-access-cj59b\") pod \"barbican-api-696dcdf5fd-4l78s\" (UID: \"1f7341d1-a217-4082-9610-1f882c55186d\") " pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:31 crc kubenswrapper[4912]: I1208 21:41:31.871851 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:32 crc kubenswrapper[4912]: I1208 21:41:32.790007 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:41:32 crc kubenswrapper[4912]: I1208 21:41:32.790063 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:41:33 crc kubenswrapper[4912]: I1208 21:41:33.754466 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:33 crc kubenswrapper[4912]: I1208 21:41:33.803225 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:41:33 crc kubenswrapper[4912]: I1208 21:41:33.803554 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:41:33 crc kubenswrapper[4912]: I1208 21:41:33.803579 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:41:33 crc kubenswrapper[4912]: I1208 21:41:33.818871 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.081656 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-696dcdf5fd-4l78s"] Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.764499 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.804306 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.817353 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-696dcdf5fd-4l78s" event={"ID":"1f7341d1-a217-4082-9610-1f882c55186d","Type":"ContainerStarted","Data":"0d990cdc0c5c1a3b094dfd9acae1c374db6c417846ec2c3211805043d35c00c0"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.817412 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-696dcdf5fd-4l78s" event={"ID":"1f7341d1-a217-4082-9610-1f882c55186d","Type":"ContainerStarted","Data":"5cc1c4b46cee6eab23d2adb3021743455af2598e42c45103339ff158a048cb00"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.822296 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67f588f949-sxst6" event={"ID":"7b0929ad-736b-47b7-8868-99450e21af32","Type":"ContainerStarted","Data":"f062b11adcdfd585b6630304c1c2559526dc04531deca983b19b0f8695e67fe6"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.822332 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67f588f949-sxst6" event={"ID":"7b0929ad-736b-47b7-8868-99450e21af32","Type":"ContainerStarted","Data":"e867ab0167eb7f9531ee73e6e3db4f72be1c37385807d44bf423679a5d9f8d93"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.824552 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-djggl" event={"ID":"06e69c2b-f54a-466a-9f5c-60499b4f5123","Type":"ContainerStarted","Data":"e1da749d342be1c8c839d40aa840357ec1ab99d7b6c02ace1d3441d8c709046d"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.829858 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7" 
event={"ID":"4697809d-6d20-495e-a2bb-9b39edb2a09c","Type":"ContainerStarted","Data":"42cae6a792241ff293196a86175b344f0f6a08af6c418e31b538fae4254a746e"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.829916 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7" event={"ID":"4697809d-6d20-495e-a2bb-9b39edb2a09c","Type":"ContainerStarted","Data":"457d24440fc1331e0f4bd062d0ed44b397bd2798090df8a8a48673fc7eeafb03"} Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.852848 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-djggl" podStartSLOduration=3.212300094 podStartE2EDuration="39.852830876s" podCreationTimestamp="2025-12-08 21:40:55 +0000 UTC" firstStartedPulling="2025-12-08 21:40:56.863675492 +0000 UTC m=+1338.726677565" lastFinishedPulling="2025-12-08 21:41:33.504206264 +0000 UTC m=+1375.367208347" observedRunningTime="2025-12-08 21:41:34.846948935 +0000 UTC m=+1376.709951018" watchObservedRunningTime="2025-12-08 21:41:34.852830876 +0000 UTC m=+1376.715832959" Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.882095 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-67f588f949-sxst6" podStartSLOduration=3.490677678 podStartE2EDuration="7.882077807s" podCreationTimestamp="2025-12-08 21:41:27 +0000 UTC" firstStartedPulling="2025-12-08 21:41:29.117741992 +0000 UTC m=+1370.980744075" lastFinishedPulling="2025-12-08 21:41:33.509142121 +0000 UTC m=+1375.372144204" observedRunningTime="2025-12-08 21:41:34.879539752 +0000 UTC m=+1376.742541845" watchObservedRunningTime="2025-12-08 21:41:34.882077807 +0000 UTC m=+1376.745079890" Dec 08 21:41:34 crc kubenswrapper[4912]: I1208 21:41:34.926449 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-67f646cf6d-bpxc7" podStartSLOduration=3.430146044 podStartE2EDuration="7.926424966s" podCreationTimestamp="2025-12-08 21:41:27 +0000 UTC" firstStartedPulling="2025-12-08 21:41:29.004129334 +0000 UTC m=+1370.867131417" lastFinishedPulling="2025-12-08 21:41:33.500408256 +0000 UTC m=+1375.363410339" observedRunningTime="2025-12-08 21:41:34.904448962 +0000 UTC m=+1376.767451045" watchObservedRunningTime="2025-12-08 21:41:34.926424966 +0000 UTC m=+1376.789427049" Dec 08 21:41:35 crc kubenswrapper[4912]: I1208 21:41:35.845685 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-696dcdf5fd-4l78s" event={"ID":"1f7341d1-a217-4082-9610-1f882c55186d","Type":"ContainerStarted","Data":"83e56c5729e3ded3da9ac84ef31ac8e295740929e335972254f28451e792d01b"} Dec 08 21:41:35 crc kubenswrapper[4912]: I1208 21:41:35.878832 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-696dcdf5fd-4l78s" podStartSLOduration=4.878798671 podStartE2EDuration="4.878798671s" podCreationTimestamp="2025-12-08 21:41:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:35.866548957 +0000 UTC m=+1377.729551040" watchObservedRunningTime="2025-12-08 21:41:35.878798671 +0000 UTC m=+1377.741800754" Dec 08 21:41:36 crc kubenswrapper[4912]: I1208 21:41:36.858980 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:36 crc kubenswrapper[4912]: I1208 21:41:36.859019 4912 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.664224 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.741660 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"] Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.741940 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="dnsmasq-dns" containerID="cri-o://74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31" gracePeriod=10 Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.888794 4912 generic.go:334] "Generic (PLEG): container finished" podID="d69c8959-7562-415d-bd31-6f8ed45750be" containerID="74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31" exitCode=0 Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.888868 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" event={"ID":"d69c8959-7562-415d-bd31-6f8ed45750be","Type":"ContainerDied","Data":"74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31"} Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.891081 4912 generic.go:334] "Generic (PLEG): container finished" podID="f4574c96-aa92-4621-92e2-d8ee041d94c8" containerID="34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60" exitCode=0 Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.891389 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4xrcd" event={"ID":"f4574c96-aa92-4621-92e2-d8ee041d94c8","Type":"ContainerDied","Data":"34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60"} Dec 08 21:41:38 crc kubenswrapper[4912]: I1208 21:41:38.957785 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.146:5353: connect: connection refused" Dec 08 21:41:39 crc kubenswrapper[4912]: E1208 21:41:39.031572 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd69c8959_7562_415d_bd31_6f8ed45750be.slice/crio-conmon-74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd69c8959_7562_415d_bd31_6f8ed45750be.slice/crio-74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.238139 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.315686 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.315875 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh7dw\" (UniqueName: \"kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.315988 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.316071 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.316099 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.316127 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc\") pod \"d69c8959-7562-415d-bd31-6f8ed45750be\" (UID: \"d69c8959-7562-415d-bd31-6f8ed45750be\") " Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.340164 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw" (OuterVolumeSpecName: "kube-api-access-sh7dw") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "kube-api-access-sh7dw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.375929 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config" (OuterVolumeSpecName: "config") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.377071 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.388438 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.389511 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.415850 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d69c8959-7562-415d-bd31-6f8ed45750be" (UID: "d69c8959-7562-415d-bd31-6f8ed45750be"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418867 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418895 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418908 4912 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418920 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418948 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69c8959-7562-415d-bd31-6f8ed45750be-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.418957 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh7dw\" (UniqueName: \"kubernetes.io/projected/d69c8959-7562-415d-bd31-6f8ed45750be-kube-api-access-sh7dw\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.923108 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.923143 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4qv76" event={"ID":"d69c8959-7562-415d-bd31-6f8ed45750be","Type":"ContainerDied","Data":"30bf3b30da98cde711862e6e06d6f0dfb1a2ea13de4750074e71cd3273947595"} Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.923593 4912 scope.go:117] "RemoveContainer" containerID="74571c32809c409cf954e63974689352e644da009509bbbc2b6de008c25e8a31" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.954460 4912 scope.go:117] "RemoveContainer" containerID="4cea9222afba6a0b0bd1b4a51d769eb1aae06c76168ebc3e200ff8b3951fa8d9" Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.957114 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"] Dec 08 21:41:39 crc kubenswrapper[4912]: I1208 21:41:39.967374 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4qv76"] Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.291377 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.335890 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config\") pod \"f4574c96-aa92-4621-92e2-d8ee041d94c8\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.336000 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hts7w\" (UniqueName: \"kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w\") pod \"f4574c96-aa92-4621-92e2-d8ee041d94c8\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.336330 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle\") pod \"f4574c96-aa92-4621-92e2-d8ee041d94c8\" (UID: \"f4574c96-aa92-4621-92e2-d8ee041d94c8\") " Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.356304 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w" (OuterVolumeSpecName: "kube-api-access-hts7w") pod "f4574c96-aa92-4621-92e2-d8ee041d94c8" (UID: "f4574c96-aa92-4621-92e2-d8ee041d94c8"). InnerVolumeSpecName "kube-api-access-hts7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.364486 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4574c96-aa92-4621-92e2-d8ee041d94c8" (UID: "f4574c96-aa92-4621-92e2-d8ee041d94c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.374982 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config" (OuterVolumeSpecName: "config") pod "f4574c96-aa92-4621-92e2-d8ee041d94c8" (UID: "f4574c96-aa92-4621-92e2-d8ee041d94c8"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.439624 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.439654 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4574c96-aa92-4621-92e2-d8ee041d94c8-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.439663 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hts7w\" (UniqueName: \"kubernetes.io/projected/f4574c96-aa92-4621-92e2-d8ee041d94c8-kube-api-access-hts7w\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.444152 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" path="/var/lib/kubelet/pods/d69c8959-7562-415d-bd31-6f8ed45750be/volumes" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.511811 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.572130 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.941268 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4xrcd" event={"ID":"f4574c96-aa92-4621-92e2-d8ee041d94c8","Type":"ContainerDied","Data":"5a3b569acec4fd654fffe9929087b149fdd599d1e62bca7939e9daf8982bac01"} Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.941316 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a3b569acec4fd654fffe9929087b149fdd599d1e62bca7939e9daf8982bac01" Dec 08 21:41:40 crc kubenswrapper[4912]: I1208 21:41:40.941291 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4xrcd" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221080 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:41 crc kubenswrapper[4912]: E1208 21:41:41.221494 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4574c96-aa92-4621-92e2-d8ee041d94c8" containerName="neutron-db-sync" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221510 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4574c96-aa92-4621-92e2-d8ee041d94c8" containerName="neutron-db-sync" Dec 08 21:41:41 crc kubenswrapper[4912]: E1208 21:41:41.221523 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="dnsmasq-dns" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221530 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="dnsmasq-dns" Dec 08 21:41:41 crc kubenswrapper[4912]: E1208 21:41:41.221545 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="init" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221552 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="init" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221726 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4574c96-aa92-4621-92e2-d8ee041d94c8" containerName="neutron-db-sync" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.221751 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="d69c8959-7562-415d-bd31-6f8ed45750be" containerName="dnsmasq-dns" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.222726 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.244691 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.336490 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.338303 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.340325 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.341411 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.342583 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-56lcr" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.343914 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.353684 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357711 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357766 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357867 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357894 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357914 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8zc5\" (UniqueName: \"kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.357940 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.459775 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.459839 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.459876 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn9fl\" (UniqueName: \"kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.459961 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.459982 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.460001 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.460021 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.460054 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8zc5\" (UniqueName: \"kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.460078 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.460103 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461098 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461277 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461714 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461865 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.461949 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.495301 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8zc5\" (UniqueName: \"kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5\") pod \"dnsmasq-dns-85ff748b95-nxvqk\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.548621 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.562933 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.563010 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.563084 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn9fl\" (UniqueName: \"kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.563159 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.563178 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.568143 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.582740 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.582972 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.583552 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.590600 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nn9fl\" (UniqueName: \"kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl\") pod \"neutron-688d6dd778-96vcb\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.656779 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.973476 4912 generic.go:334] "Generic (PLEG): container finished" podID="06e69c2b-f54a-466a-9f5c-60499b4f5123" containerID="e1da749d342be1c8c839d40aa840357ec1ab99d7b6c02ace1d3441d8c709046d" exitCode=0 Dec 08 21:41:41 crc kubenswrapper[4912]: I1208 21:41:41.973884 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-djggl" event={"ID":"06e69c2b-f54a-466a-9f5c-60499b4f5123","Type":"ContainerDied","Data":"e1da749d342be1c8c839d40aa840357ec1ab99d7b6c02ace1d3441d8c709046d"} Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.144583 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:42 crc kubenswrapper[4912]: W1208 21:41:42.149368 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8345d4c_d0b8_40ea_99e5_00dacda84662.slice/crio-beb62da05881d45d487a6f6d0d0d25cb45df8caddb813fae307ea00fd677367c WatchSource:0}: Error finding container beb62da05881d45d487a6f6d0d0d25cb45df8caddb813fae307ea00fd677367c: Status 404 returned error can't find the container with id beb62da05881d45d487a6f6d0d0d25cb45df8caddb813fae307ea00fd677367c Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.502599 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.995754 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerStarted","Data":"46bdf0e0c8682b0558c86c0c24c8efa3c607dd557b34f4c6db9a514f63f4529a"} Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.996134 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerStarted","Data":"454abebeaab9df0133819e21c2c78fc103362065cef501ef02df6c79d4120ed6"} Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.997421 4912 generic.go:334] "Generic (PLEG): container finished" podID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerID="c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f" exitCode=0 Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.998632 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" event={"ID":"f8345d4c-d0b8-40ea-99e5-00dacda84662","Type":"ContainerDied","Data":"c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f"} Dec 08 21:41:42 crc kubenswrapper[4912]: I1208 21:41:42.998685 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" event={"ID":"f8345d4c-d0b8-40ea-99e5-00dacda84662","Type":"ContainerStarted","Data":"beb62da05881d45d487a6f6d0d0d25cb45df8caddb813fae307ea00fd677367c"} Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.452713 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-djggl" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.526635 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.526735 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.526822 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qpqm\" (UniqueName: \"kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.526864 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.526920 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.527235 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id\") pod \"06e69c2b-f54a-466a-9f5c-60499b4f5123\" (UID: \"06e69c2b-f54a-466a-9f5c-60499b4f5123\") " Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.527699 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.541794 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.547243 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts" (OuterVolumeSpecName: "scripts") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.558363 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm" (OuterVolumeSpecName: "kube-api-access-8qpqm") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "kube-api-access-8qpqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.595261 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.631250 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.631284 4912 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.631296 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qpqm\" (UniqueName: \"kubernetes.io/projected/06e69c2b-f54a-466a-9f5c-60499b4f5123-kube-api-access-8qpqm\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.631306 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.631315 4912 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e69c2b-f54a-466a-9f5c-60499b4f5123-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.675112 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data" (OuterVolumeSpecName: "config-data") pod "06e69c2b-f54a-466a-9f5c-60499b4f5123" (UID: "06e69c2b-f54a-466a-9f5c-60499b4f5123"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.732762 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e69c2b-f54a-466a-9f5c-60499b4f5123-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:43 crc kubenswrapper[4912]: I1208 21:41:43.977268 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.009889 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" event={"ID":"f8345d4c-d0b8-40ea-99e5-00dacda84662","Type":"ContainerStarted","Data":"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac"} Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.010046 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.012141 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-djggl" event={"ID":"06e69c2b-f54a-466a-9f5c-60499b4f5123","Type":"ContainerDied","Data":"eb292047f3bf2c6ab58cff4b1f24da96c4054728782358a94fc6d8ce00a7fcfa"} Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.012184 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb292047f3bf2c6ab58cff4b1f24da96c4054728782358a94fc6d8ce00a7fcfa" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.012247 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-djggl" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.017157 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerStarted","Data":"8e8d05a39de6569a5bad68e352ba97aa7e0d09d3dbb6c332f0e86f04b68c88d0"} Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.017469 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.049510 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" podStartSLOduration=3.049491591 podStartE2EDuration="3.049491591s" podCreationTimestamp="2025-12-08 21:41:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:44.034771183 +0000 UTC m=+1385.897773266" watchObservedRunningTime="2025-12-08 21:41:44.049491591 +0000 UTC m=+1385.912493674" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.070148 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-688d6dd778-96vcb" podStartSLOduration=3.070125911 podStartE2EDuration="3.070125911s" podCreationTimestamp="2025-12-08 21:41:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:44.059449926 +0000 UTC m=+1385.922452009" watchObservedRunningTime="2025-12-08 21:41:44.070125911 +0000 UTC m=+1385.933127994" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.290773 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:41:44 crc kubenswrapper[4912]: E1208 21:41:44.291290 4912 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123" containerName="cinder-db-sync" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.291312 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123" containerName="cinder-db-sync" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.291600 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123" containerName="cinder-db-sync" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.292932 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.305612 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.305847 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.305987 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jqk94" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.306175 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.320369 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.446009 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447135 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447200 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvmns\" (UniqueName: \"kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447287 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447306 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447336 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.447353 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.488667 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.491382 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.517097 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549402 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549456 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549499 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549522 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549552 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.549623 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvmns\" (UniqueName: \"kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.554679 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" 
Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.558025 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.560139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.561635 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.566734 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.585747 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvmns\" (UniqueName: \"kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns\") pod \"cinder-scheduler-0\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.609529 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.624619 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.629177 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.629712 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.632239 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.659546 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.659603 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjm2k\" (UniqueName: \"kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.659653 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.659681 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.660020 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.660289 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.762512 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r976r\" (UniqueName: \"kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763055 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763091 4912 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763122 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763149 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763181 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjm2k\" (UniqueName: \"kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763236 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763270 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763339 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763369 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763408 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763471 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.763516 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.764173 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-696dcdf5fd-4l78s" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.764957 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.764964 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.765005 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.766312 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.766623 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.790931 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjm2k\" (UniqueName: \"kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k\") pod \"dnsmasq-dns-5c9776ccc5-8mgnb\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.823564 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.856052 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"] Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.856320 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-787c747fb6-jxmjq" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api-log" containerID="cri-o://a57cf77cc1b3d0965967577d4f265f825ec16280440fe1641aa4565dd283842a" gracePeriod=30 Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.856673 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-787c747fb6-jxmjq" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api" containerID="cri-o://6826d06f22c158b03c6b62a8f2a97416e4e959205c555ed5c96c7768d1ea5787" gracePeriod=30 Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.864953 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865109 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865143 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r976r\" (UniqueName: \"kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865186 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865216 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865306 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865327 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.865466 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.866700 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.872234 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.876335 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.877026 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:44 crc kubenswrapper[4912]: I1208 21:41:44.887898 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:45 crc kubenswrapper[4912]: I1208 21:41:45.048517 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r976r\" (UniqueName: \"kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r\") pod \"cinder-api-0\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " pod="openstack/cinder-api-0" Dec 08 21:41:45 crc kubenswrapper[4912]: I1208 21:41:45.072616 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:45 crc kubenswrapper[4912]: I1208 21:41:45.207327 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:41:45 crc kubenswrapper[4912]: W1208 21:41:45.261752 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c698b2d_df42_4919_a957_79b6deae788d.slice/crio-74fd3ecade2b68c8c69b2742fed663dc008b7b2291b084a5006c5e2681fcf394 WatchSource:0}: Error finding container 74fd3ecade2b68c8c69b2742fed663dc008b7b2291b084a5006c5e2681fcf394: Status 404 returned error can't find the container with id 74fd3ecade2b68c8c69b2742fed663dc008b7b2291b084a5006c5e2681fcf394 Dec 08 21:41:45 crc kubenswrapper[4912]: I1208 21:41:45.668534 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"] Dec 08 21:41:45 crc kubenswrapper[4912]: W1208 21:41:45.680327 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d0fbbfe_8e23_4a55_b432_af0f17b5511d.slice/crio-10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662 WatchSource:0}: Error finding container 10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662: Status 404 returned error can't find the container with id 10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662 Dec 08 21:41:45 crc kubenswrapper[4912]: I1208 21:41:45.687308 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.150564 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" event={"ID":"c4df2822-3cb8-47b8-b06e-15601ceb80ca","Type":"ContainerStarted","Data":"93aa3389a0381d8036b9b8c7fdfb428e60ffab9b70942645aeb5f7fed1405a0c"} Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.170261 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerStarted","Data":"10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662"} Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.202356 4912 generic.go:334] "Generic (PLEG): container finished" podID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerID="a57cf77cc1b3d0965967577d4f265f825ec16280440fe1641aa4565dd283842a" exitCode=143 Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.202433 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerDied","Data":"a57cf77cc1b3d0965967577d4f265f825ec16280440fe1641aa4565dd283842a"} Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.223439 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="dnsmasq-dns" containerID="cri-o://de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac" gracePeriod=10 Dec 08 21:41:46 crc kubenswrapper[4912]: I1208 21:41:46.223827 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerStarted","Data":"74fd3ecade2b68c8c69b2742fed663dc008b7b2291b084a5006c5e2681fcf394"} Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.056798 4912 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124232 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124436 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124482 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124501 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124518 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8zc5\" (UniqueName: \"kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.124548 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc\") pod \"f8345d4c-d0b8-40ea-99e5-00dacda84662\" (UID: \"f8345d4c-d0b8-40ea-99e5-00dacda84662\") " Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.145117 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5" (OuterVolumeSpecName: "kube-api-access-d8zc5") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "kube-api-access-d8zc5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.226838 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8zc5\" (UniqueName: \"kubernetes.io/projected/f8345d4c-d0b8-40ea-99e5-00dacda84662-kube-api-access-d8zc5\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.239246 4912 generic.go:334] "Generic (PLEG): container finished" podID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerID="de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac" exitCode=0 Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.239783 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" event={"ID":"f8345d4c-d0b8-40ea-99e5-00dacda84662","Type":"ContainerDied","Data":"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac"} Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.239816 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" event={"ID":"f8345d4c-d0b8-40ea-99e5-00dacda84662","Type":"ContainerDied","Data":"beb62da05881d45d487a6f6d0d0d25cb45df8caddb813fae307ea00fd677367c"} Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.239834 4912 scope.go:117] "RemoveContainer" containerID="de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.240160 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-nxvqk" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.242152 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.244461 4912 generic.go:334] "Generic (PLEG): container finished" podID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerID="02839965b7af9e977e09f3e4132e9c7213df281b994d8235f91465b19e82bf43" exitCode=0 Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.244532 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" event={"ID":"c4df2822-3cb8-47b8-b06e-15601ceb80ca","Type":"ContainerDied","Data":"02839965b7af9e977e09f3e4132e9c7213df281b994d8235f91465b19e82bf43"} Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.250752 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerStarted","Data":"da04bd4638aa60403c92aa20a8699e81e729579a161d46ebb71701604cfe0d40"} Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.260493 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.275322 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.319359 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config" (OuterVolumeSpecName: "config") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.329934 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.330007 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.330082 4912 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.330100 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.355780 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f8345d4c-d0b8-40ea-99e5-00dacda84662" (UID: "f8345d4c-d0b8-40ea-99e5-00dacda84662"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.383101 4912 scope.go:117] "RemoveContainer" containerID="c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.415914 4912 scope.go:117] "RemoveContainer" containerID="de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac" Dec 08 21:41:47 crc kubenswrapper[4912]: E1208 21:41:47.416457 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac\": container with ID starting with de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac not found: ID does not exist" containerID="de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.416499 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac"} err="failed to get container status \"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac\": rpc error: code = NotFound desc = could not find container \"de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac\": container with ID starting with de9acab3ae3742512f61a05427d82c42f9920138e9a91948812bd080f30cedac not found: ID does not exist" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.416530 4912 scope.go:117] "RemoveContainer" containerID="c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f" Dec 08 21:41:47 crc kubenswrapper[4912]: E1208 21:41:47.418218 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f\": container with ID starting with c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f not found: ID does not exist" containerID="c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.418341 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f"} err="failed to get container status \"c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f\": rpc error: code = NotFound desc = could not find container \"c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f\": container with ID starting with c700629cb529d643f9178939670c098e57173b25f65c845453ebd502d9afc65f not found: ID does not exist" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.431300 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8345d4c-d0b8-40ea-99e5-00dacda84662-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.595174 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.619251 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-nxvqk"] Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.703460 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.862965 4912 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/neutron-66784bb489-2dwmz"] Dec 08 21:41:47 crc kubenswrapper[4912]: E1208 21:41:47.875580 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="dnsmasq-dns" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.875610 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="dnsmasq-dns" Dec 08 21:41:47 crc kubenswrapper[4912]: E1208 21:41:47.875629 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="init" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.875641 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="init" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.875819 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" containerName="dnsmasq-dns" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.876906 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.880078 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.880178 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.885899 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66784bb489-2dwmz"] Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941641 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-combined-ca-bundle\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941699 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-config\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941835 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-ovndb-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941896 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-public-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941953 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-httpd-config\") 
pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.941992 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gfz9\" (UniqueName: \"kubernetes.io/projected/1e51ce4b-382f-4365-a312-0219b77aab6e-kube-api-access-9gfz9\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:47 crc kubenswrapper[4912]: I1208 21:41:47.942062 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-internal-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043606 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-combined-ca-bundle\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043674 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-config\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-ovndb-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043782 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-public-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043821 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-httpd-config\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043857 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gfz9\" (UniqueName: \"kubernetes.io/projected/1e51ce4b-382f-4365-a312-0219b77aab6e-kube-api-access-9gfz9\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.043913 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-internal-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: 
\"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.054142 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-ovndb-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.054778 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-config\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.057930 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-httpd-config\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.058915 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-combined-ca-bundle\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.059842 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-public-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.060581 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e51ce4b-382f-4365-a312-0219b77aab6e-internal-tls-certs\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.065822 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gfz9\" (UniqueName: \"kubernetes.io/projected/1e51ce4b-382f-4365-a312-0219b77aab6e-kube-api-access-9gfz9\") pod \"neutron-66784bb489-2dwmz\" (UID: \"1e51ce4b-382f-4365-a312-0219b77aab6e\") " pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.239011 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.273117 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" event={"ID":"c4df2822-3cb8-47b8-b06e-15601ceb80ca","Type":"ContainerStarted","Data":"eca92ec2ccc639c01fc7ed4fa1c9c5fa08950a2448be4abce69e2c288754d6c2"} Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.274276 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.277792 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerStarted","Data":"6a9fdebb1400c7cf6d19d5055653f7e85b784bc786c9bc851e7c99d2f0ea6982"} Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.277931 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api-log" containerID="cri-o://da04bd4638aa60403c92aa20a8699e81e729579a161d46ebb71701604cfe0d40" gracePeriod=30 Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.278156 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.278200 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api" containerID="cri-o://6a9fdebb1400c7cf6d19d5055653f7e85b784bc786c9bc851e7c99d2f0ea6982" gracePeriod=30 Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.298054 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" podStartSLOduration=4.298023791 podStartE2EDuration="4.298023791s" podCreationTimestamp="2025-12-08 21:41:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:48.29566665 +0000 UTC m=+1390.158668733" watchObservedRunningTime="2025-12-08 21:41:48.298023791 +0000 UTC m=+1390.161025874" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.298433 4912 generic.go:334] "Generic (PLEG): container finished" podID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerID="6826d06f22c158b03c6b62a8f2a97416e4e959205c555ed5c96c7768d1ea5787" exitCode=0 Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.298506 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerDied","Data":"6826d06f22c158b03c6b62a8f2a97416e4e959205c555ed5c96c7768d1ea5787"} Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.303767 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerStarted","Data":"f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c"} Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.400701 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.400681827 podStartE2EDuration="4.400681827s" podCreationTimestamp="2025-12-08 21:41:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-08 21:41:48.337972457 +0000 UTC m=+1390.200974540" watchObservedRunningTime="2025-12-08 21:41:48.400681827 +0000 UTC m=+1390.263683900" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.517485 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8345d4c-d0b8-40ea-99e5-00dacda84662" path="/var/lib/kubelet/pods/f8345d4c-d0b8-40ea-99e5-00dacda84662/volumes" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.610373 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.675497 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data\") pod \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.675696 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs\") pod \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.675790 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom\") pod \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.675856 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlgr8\" (UniqueName: \"kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8\") pod \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.675981 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle\") pod \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\" (UID: \"2cec7604-be34-4a76-bee3-ddbb8a1b7a28\") " Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.676513 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs" (OuterVolumeSpecName: "logs") pod "2cec7604-be34-4a76-bee3-ddbb8a1b7a28" (UID: "2cec7604-be34-4a76-bee3-ddbb8a1b7a28"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.677099 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.681764 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8" (OuterVolumeSpecName: "kube-api-access-tlgr8") pod "2cec7604-be34-4a76-bee3-ddbb8a1b7a28" (UID: "2cec7604-be34-4a76-bee3-ddbb8a1b7a28"). InnerVolumeSpecName "kube-api-access-tlgr8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.685718 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2cec7604-be34-4a76-bee3-ddbb8a1b7a28" (UID: "2cec7604-be34-4a76-bee3-ddbb8a1b7a28"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.711146 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2cec7604-be34-4a76-bee3-ddbb8a1b7a28" (UID: "2cec7604-be34-4a76-bee3-ddbb8a1b7a28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.738147 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data" (OuterVolumeSpecName: "config-data") pod "2cec7604-be34-4a76-bee3-ddbb8a1b7a28" (UID: "2cec7604-be34-4a76-bee3-ddbb8a1b7a28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.779381 4912 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.779430 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlgr8\" (UniqueName: \"kubernetes.io/projected/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-kube-api-access-tlgr8\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.779446 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:48 crc kubenswrapper[4912]: I1208 21:41:48.779457 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cec7604-be34-4a76-bee3-ddbb8a1b7a28-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.182415 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66784bb489-2dwmz"] Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325643 4912 generic.go:334] "Generic (PLEG): container finished" podID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerID="6a9fdebb1400c7cf6d19d5055653f7e85b784bc786c9bc851e7c99d2f0ea6982" exitCode=0 Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325686 4912 generic.go:334] "Generic (PLEG): container finished" podID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerID="da04bd4638aa60403c92aa20a8699e81e729579a161d46ebb71701604cfe0d40" exitCode=143 Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325733 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerDied","Data":"6a9fdebb1400c7cf6d19d5055653f7e85b784bc786c9bc851e7c99d2f0ea6982"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325767 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerDied","Data":"da04bd4638aa60403c92aa20a8699e81e729579a161d46ebb71701604cfe0d40"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325780 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4d0fbbfe-8e23-4a55-b432-af0f17b5511d","Type":"ContainerDied","Data":"10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.325790 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10236a5a4e6a6c64f0c57dc71d78ef4cebdb7d6e3dc51684c9575fd9c26b8662" Dec 08 21:41:49 crc kubenswrapper[4912]: E1208 21:41:49.327006 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.329767 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-787c747fb6-jxmjq" event={"ID":"2cec7604-be34-4a76-bee3-ddbb8a1b7a28","Type":"ContainerDied","Data":"b98915b2ac01025d19dc425894d79c183ea00595f1a0dba7775f352f425670e0"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.329813 4912 scope.go:117] "RemoveContainer" containerID="6826d06f22c158b03c6b62a8f2a97416e4e959205c555ed5c96c7768d1ea5787" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.329954 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-787c747fb6-jxmjq" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.343427 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerStarted","Data":"cd294f65a900a7c89b9ff44ee55ebb2076c1b80e8bd202451cbb5d5a88b6768b"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.351841 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66784bb489-2dwmz" event={"ID":"1e51ce4b-382f-4365-a312-0219b77aab6e","Type":"ContainerStarted","Data":"5a1eb6ecb599930d559a2a7749a9027e9a81844328869c9c4fc500486085d7c5"} Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.378074 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.380203 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.496073016 podStartE2EDuration="5.38018666s" podCreationTimestamp="2025-12-08 21:41:44 +0000 UTC" firstStartedPulling="2025-12-08 21:41:45.277090685 +0000 UTC m=+1387.140092768" lastFinishedPulling="2025-12-08 21:41:46.161204329 +0000 UTC m=+1388.024206412" observedRunningTime="2025-12-08 21:41:49.372562625 +0000 UTC m=+1391.235564708" watchObservedRunningTime="2025-12-08 21:41:49.38018666 +0000 UTC m=+1391.243188753" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393243 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r976r\" (UniqueName: \"kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393379 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393407 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393479 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393528 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393625 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.393674 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle\") pod \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\" (UID: \"4d0fbbfe-8e23-4a55-b432-af0f17b5511d\") " Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.402854 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs" (OuterVolumeSpecName: "logs") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.403402 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.417270 4912 scope.go:117] "RemoveContainer" containerID="a57cf77cc1b3d0965967577d4f265f825ec16280440fe1641aa4565dd283842a" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.438100 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.442719 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts" (OuterVolumeSpecName: "scripts") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.449405 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r" (OuterVolumeSpecName: "kube-api-access-r976r") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "kube-api-access-r976r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.466147 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.488056 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"] Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498684 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498719 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498732 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r976r\" (UniqueName: \"kubernetes.io/projected/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-kube-api-access-r976r\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498740 4912 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498750 4912 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.498759 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.505998 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data" (OuterVolumeSpecName: "config-data") pod "4d0fbbfe-8e23-4a55-b432-af0f17b5511d" (UID: "4d0fbbfe-8e23-4a55-b432-af0f17b5511d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.509753 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-787c747fb6-jxmjq"] Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.600510 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d0fbbfe-8e23-4a55-b432-af0f17b5511d-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:49 crc kubenswrapper[4912]: I1208 21:41:49.630321 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.368584 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66784bb489-2dwmz" event={"ID":"1e51ce4b-382f-4365-a312-0219b77aab6e","Type":"ContainerStarted","Data":"a8edfa5a8fe8b598aee622c6ee1d94a7bfb01ff177549cf93dd54b50ca6770df"} Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.369276 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66784bb489-2dwmz" event={"ID":"1e51ce4b-382f-4365-a312-0219b77aab6e","Type":"ContainerStarted","Data":"3be6d2d4ed57395cd177ca224531ab61e1bb6c1ec3fe59637344d13aa5bb4405"} Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.368672 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.438476 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-66784bb489-2dwmz" podStartSLOduration=3.438450565 podStartE2EDuration="3.438450565s" podCreationTimestamp="2025-12-08 21:41:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:50.396797226 +0000 UTC m=+1392.259799319" watchObservedRunningTime="2025-12-08 21:41:50.438450565 +0000 UTC m=+1392.301452648" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.450149 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" path="/var/lib/kubelet/pods/2cec7604-be34-4a76-bee3-ddbb8a1b7a28/volumes" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.451252 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.505134 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517078 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:50 crc kubenswrapper[4912]: E1208 21:41:50.517529 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517551 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api" Dec 08 21:41:50 crc kubenswrapper[4912]: E1208 21:41:50.517584 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517592 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: E1208 21:41:50.517608 4912 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517615 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: E1208 21:41:50.517628 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517635 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517816 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517838 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517858 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cec7604-be34-4a76-bee3-ddbb8a1b7a28" containerName="barbican-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.517872 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" containerName="cinder-api-log" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.526343 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.530598 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.530893 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.531028 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.531284 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.622491 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.622536 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-logs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.622710 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmzj8\" (UniqueName: \"kubernetes.io/projected/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-kube-api-access-kmzj8\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.622910 4912 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-scripts\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.623051 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data-custom\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.623110 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.623199 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.623415 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.623451 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725021 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data-custom\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725103 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725151 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725201 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-public-tls-certs\") pod \"cinder-api-0\" (UID: 
\"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725231 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725275 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725303 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-logs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725304 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725489 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmzj8\" (UniqueName: \"kubernetes.io/projected/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-kube-api-access-kmzj8\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725603 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-scripts\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.725870 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-logs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.730745 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data-custom\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.732550 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.733119 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-scripts\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " 
pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.733662 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.741153 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-config-data\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.745674 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.745943 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmzj8\" (UniqueName: \"kubernetes.io/projected/793a5438-7a15-4ff3-b6f1-1f12dbfabe7f-kube-api-access-kmzj8\") pod \"cinder-api-0\" (UID: \"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f\") " pod="openstack/cinder-api-0" Dec 08 21:41:50 crc kubenswrapper[4912]: I1208 21:41:50.851770 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 08 21:41:51 crc kubenswrapper[4912]: I1208 21:41:51.359340 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 08 21:41:51 crc kubenswrapper[4912]: W1208 21:41:51.365674 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod793a5438_7a15_4ff3_b6f1_1f12dbfabe7f.slice/crio-57c696d213367d5bb071e92cdf83106893a32bc84d0b492f53ed5b7dabd83ab5 WatchSource:0}: Error finding container 57c696d213367d5bb071e92cdf83106893a32bc84d0b492f53ed5b7dabd83ab5: Status 404 returned error can't find the container with id 57c696d213367d5bb071e92cdf83106893a32bc84d0b492f53ed5b7dabd83ab5 Dec 08 21:41:51 crc kubenswrapper[4912]: I1208 21:41:51.379464 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f","Type":"ContainerStarted","Data":"57c696d213367d5bb071e92cdf83106893a32bc84d0b492f53ed5b7dabd83ab5"} Dec 08 21:41:51 crc kubenswrapper[4912]: I1208 21:41:51.380479 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:41:52 crc kubenswrapper[4912]: I1208 21:41:52.439739 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d0fbbfe-8e23-4a55-b432-af0f17b5511d" path="/var/lib/kubelet/pods/4d0fbbfe-8e23-4a55-b432-af0f17b5511d/volumes" Dec 08 21:41:53 crc kubenswrapper[4912]: I1208 21:41:53.404964 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f","Type":"ContainerStarted","Data":"c848365c65f222fe843bfbf5a45c1332791f6cc8111691a4c0b54e0cdeb3c3ba"} Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.420323 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"793a5438-7a15-4ff3-b6f1-1f12dbfabe7f","Type":"ContainerStarted","Data":"823523cd17a29a7c707e33ee5ce0a49f3de6403ba231024a9555431830d0cd49"} Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.420513 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.464468 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.464448171 podStartE2EDuration="4.464448171s" podCreationTimestamp="2025-12-08 21:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:41:54.440851445 +0000 UTC m=+1396.303853538" watchObservedRunningTime="2025-12-08 21:41:54.464448171 +0000 UTC m=+1396.327450254" Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.824960 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.960876 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"] Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.961169 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="dnsmasq-dns" containerID="cri-o://79e797e31d3691b81ba614ebfeeec6588bdf7dcb7d8a809d329368c5b30d2db0" gracePeriod=10 Dec 08 21:41:54 crc kubenswrapper[4912]: I1208 21:41:54.997358 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.055867 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.537005 4912 generic.go:334] "Generic (PLEG): container finished" podID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerID="79e797e31d3691b81ba614ebfeeec6588bdf7dcb7d8a809d329368c5b30d2db0" exitCode=0 Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.537531 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="cinder-scheduler" containerID="cri-o://f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c" gracePeriod=30 Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.538049 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" event={"ID":"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c","Type":"ContainerDied","Data":"79e797e31d3691b81ba614ebfeeec6588bdf7dcb7d8a809d329368c5b30d2db0"} Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.539531 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="probe" containerID="cri-o://cd294f65a900a7c89b9ff44ee55ebb2076c1b80e8bd202451cbb5d5a88b6768b" gracePeriod=30 Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.686024 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.836837 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.836931 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.836973 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.836991 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.837102 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdhdp\" (UniqueName: \"kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:55 crc kubenswrapper[4912]: I1208 21:41:55.837147 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb\") pod \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\" (UID: \"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c\") " Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.233323 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp" (OuterVolumeSpecName: "kube-api-access-gdhdp") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "kube-api-access-gdhdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.263343 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdhdp\" (UniqueName: \"kubernetes.io/projected/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-kube-api-access-gdhdp\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.282947 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.288933 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.319549 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config" (OuterVolumeSpecName: "config") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.339702 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.344574 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" (UID: "d1e978d4-7969-4ac0-bb8a-42c224e7bd8c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.364544 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.364836 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.364914 4912 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.364986 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.365064 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.550208 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" event={"ID":"d1e978d4-7969-4ac0-bb8a-42c224e7bd8c","Type":"ContainerDied","Data":"fb3ccccf010e97cf30d93f6d6eb21daee7eba4b427786c5b674dc2e8dada22a1"} Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.551172 4912 scope.go:117] "RemoveContainer" 
containerID="79e797e31d3691b81ba614ebfeeec6588bdf7dcb7d8a809d329368c5b30d2db0" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.550298 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-mczwp" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.590570 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"] Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.595750 4912 scope.go:117] "RemoveContainer" containerID="10e00959b53f9ab07b2b7e2a1131fc9d8ce67327c89c4e281ce9192e3ddc2ac1" Dec 08 21:41:56 crc kubenswrapper[4912]: I1208 21:41:56.610026 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-mczwp"] Dec 08 21:41:57 crc kubenswrapper[4912]: I1208 21:41:57.362226 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-868f84fc6-f7svl" Dec 08 21:41:57 crc kubenswrapper[4912]: I1208 21:41:57.378142 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-868f84fc6-f7svl" Dec 08 21:41:57 crc kubenswrapper[4912]: I1208 21:41:57.564179 4912 generic.go:334] "Generic (PLEG): container finished" podID="6c698b2d-df42-4919-a957-79b6deae788d" containerID="cd294f65a900a7c89b9ff44ee55ebb2076c1b80e8bd202451cbb5d5a88b6768b" exitCode=0 Dec 08 21:41:57 crc kubenswrapper[4912]: I1208 21:41:57.564266 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerDied","Data":"cd294f65a900a7c89b9ff44ee55ebb2076c1b80e8bd202451cbb5d5a88b6768b"} Dec 08 21:41:58 crc kubenswrapper[4912]: I1208 21:41:58.472517 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" path="/var/lib/kubelet/pods/d1e978d4-7969-4ac0-bb8a-42c224e7bd8c/volumes" Dec 08 21:41:59 crc kubenswrapper[4912]: I1208 21:41:59.687118 4912 generic.go:334] "Generic (PLEG): container finished" podID="6c698b2d-df42-4919-a957-79b6deae788d" containerID="f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c" exitCode=0 Dec 08 21:41:59 crc kubenswrapper[4912]: I1208 21:41:59.687217 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerDied","Data":"f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c"} Dec 08 21:41:59 crc kubenswrapper[4912]: E1208 21:41:59.687362 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c698b2d_df42_4919_a957_79b6deae788d.slice/crio-f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c698b2d_df42_4919_a957_79b6deae788d.slice/crio-conmon-f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.481180 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.637518 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.637657 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.637696 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvmns\" (UniqueName: \"kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.637726 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.637868 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.638118 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id\") pod \"6c698b2d-df42-4919-a957-79b6deae788d\" (UID: \"6c698b2d-df42-4919-a957-79b6deae788d\") " Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.639723 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.652207 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.652275 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts" (OuterVolumeSpecName: "scripts") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.652635 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns" (OuterVolumeSpecName: "kube-api-access-zvmns") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "kube-api-access-zvmns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.714204 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.720023 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c698b2d-df42-4919-a957-79b6deae788d","Type":"ContainerDied","Data":"74fd3ecade2b68c8c69b2742fed663dc008b7b2291b084a5006c5e2681fcf394"} Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.720111 4912 scope.go:117] "RemoveContainer" containerID="cd294f65a900a7c89b9ff44ee55ebb2076c1b80e8bd202451cbb5d5a88b6768b" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.720161 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.806460 4912 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c698b2d-df42-4919-a957-79b6deae788d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.806500 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.806516 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvmns\" (UniqueName: \"kubernetes.io/projected/6c698b2d-df42-4919-a957-79b6deae788d-kube-api-access-zvmns\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.806529 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.806541 4912 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.855235 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data" (OuterVolumeSpecName: "config-data") pod "6c698b2d-df42-4919-a957-79b6deae788d" (UID: "6c698b2d-df42-4919-a957-79b6deae788d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.907253 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c698b2d-df42-4919-a957-79b6deae788d-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:00 crc kubenswrapper[4912]: I1208 21:42:00.950987 4912 scope.go:117] "RemoveContainer" containerID="f3707cea099477a5b9cf7e911ee87875b34d9dc7c92f6fa03b3b62fb3e95357c" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.037840 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-d797fb44f-6g7nm" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.098135 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.109225 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.175959 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:42:01 crc kubenswrapper[4912]: E1208 21:42:01.336711 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="dnsmasq-dns" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.336750 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="dnsmasq-dns" Dec 08 21:42:01 crc kubenswrapper[4912]: E1208 21:42:01.336768 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="cinder-scheduler" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.336775 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="cinder-scheduler" Dec 08 21:42:01 crc kubenswrapper[4912]: E1208 21:42:01.336804 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="init" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.336809 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="init" Dec 08 21:42:01 crc kubenswrapper[4912]: E1208 21:42:01.336822 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="probe" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.336828 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="probe" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.337077 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1e978d4-7969-4ac0-bb8a-42c224e7bd8c" containerName="dnsmasq-dns" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.337090 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="cinder-scheduler" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.337111 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c698b2d-df42-4919-a957-79b6deae788d" containerName="probe" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.346772 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.350841 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.380438 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.531741 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.532199 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb950bdf-804c-48b5-bfc2-2c92c304f143-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.532402 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-scripts\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.532456 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.532836 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.532883 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g8gc\" (UniqueName: \"kubernetes.io/projected/cb950bdf-804c-48b5-bfc2-2c92c304f143-kube-api-access-9g8gc\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636415 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636481 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g8gc\" (UniqueName: \"kubernetes.io/projected/cb950bdf-804c-48b5-bfc2-2c92c304f143-kube-api-access-9g8gc\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636523 4912 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636578 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb950bdf-804c-48b5-bfc2-2c92c304f143-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636630 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-scripts\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.636657 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.637899 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb950bdf-804c-48b5-bfc2-2c92c304f143-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.643424 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-scripts\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.644723 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.646810 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.650553 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb950bdf-804c-48b5-bfc2-2c92c304f143-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.663093 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g8gc\" (UniqueName: \"kubernetes.io/projected/cb950bdf-804c-48b5-bfc2-2c92c304f143-kube-api-access-9g8gc\") pod \"cinder-scheduler-0\" (UID: \"cb950bdf-804c-48b5-bfc2-2c92c304f143\") " pod="openstack/cinder-scheduler-0" Dec 
08 21:42:01 crc kubenswrapper[4912]: I1208 21:42:01.686747 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.541558 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c698b2d-df42-4919-a957-79b6deae788d" path="/var/lib/kubelet/pods/6c698b2d-df42-4919-a957-79b6deae788d/volumes" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.670292 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.756728 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cb950bdf-804c-48b5-bfc2-2c92c304f143","Type":"ContainerStarted","Data":"12424d7e57fb4c34f05471491968cd161982ea04009edeab8a738d6e5d52e4d3"} Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.853685 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.854956 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.856757 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-k4cqd" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.860063 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.860276 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.867508 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.924958 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.925587 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.925892 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kw4l\" (UniqueName: \"kubernetes.io/projected/6b9f7656-3af5-4d88-a713-1dad51007309-kube-api-access-6kw4l\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.926063 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.965023 4912 patch_prober.go:28] interesting 
pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:42:02 crc kubenswrapper[4912]: I1208 21:42:02.965114 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.028316 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.121427 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kw4l\" (UniqueName: \"kubernetes.io/projected/6b9f7656-3af5-4d88-a713-1dad51007309-kube-api-access-6kw4l\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.121507 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.121757 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.123398 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.142159 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.151344 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kw4l\" (UniqueName: \"kubernetes.io/projected/6b9f7656-3af5-4d88-a713-1dad51007309-kube-api-access-6kw4l\") pod \"openstackclient\" (UID: \"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.159867 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9f7656-3af5-4d88-a713-1dad51007309-combined-ca-bundle\") pod \"openstackclient\" (UID: 
\"6b9f7656-3af5-4d88-a713-1dad51007309\") " pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.189144 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 08 21:42:03 crc kubenswrapper[4912]: I1208 21:42:03.792307 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 08 21:42:04 crc kubenswrapper[4912]: I1208 21:42:04.806407 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b9f7656-3af5-4d88-a713-1dad51007309","Type":"ContainerStarted","Data":"99beb9cdf36f0fa64dad9170b48c504a39c0a29f4c3c354f58b8adae9b8c79bf"} Dec 08 21:42:04 crc kubenswrapper[4912]: I1208 21:42:04.809980 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cb950bdf-804c-48b5-bfc2-2c92c304f143","Type":"ContainerStarted","Data":"7214a552f913cff78c372da36055cbfda07fee1b9b8b75502851fda149bbb4e6"} Dec 08 21:42:05 crc kubenswrapper[4912]: I1208 21:42:05.490298 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 08 21:42:05 crc kubenswrapper[4912]: I1208 21:42:05.914403 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cb950bdf-804c-48b5-bfc2-2c92c304f143","Type":"ContainerStarted","Data":"a9ea559f42525b278468026aef6f24f9bcb3c8cc88617bd4fb50ca9db4714eb1"} Dec 08 21:42:06 crc kubenswrapper[4912]: I1208 21:42:06.360387 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.360369702 podStartE2EDuration="5.360369702s" podCreationTimestamp="2025-12-08 21:42:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:06.344217937 +0000 UTC m=+1408.207220020" watchObservedRunningTime="2025-12-08 21:42:06.360369702 +0000 UTC m=+1408.223371785" Dec 08 21:42:06 crc kubenswrapper[4912]: I1208 21:42:06.727870 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 08 21:42:08 crc kubenswrapper[4912]: I1208 21:42:08.936264 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="793a5438-7a15-4ff3-b6f1-1f12dbfabe7f" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:42:10 crc kubenswrapper[4912]: E1208 21:42:10.078695 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.235315 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-589fbdcc4f-69fll"] Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.237443 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.241999 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.242114 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.242225 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.245052 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-589fbdcc4f-69fll"] Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270631 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-combined-ca-bundle\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270675 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-internal-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270727 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-config-data\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270746 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnj64\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-kube-api-access-xnj64\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270837 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-public-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.270968 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-log-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.271167 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-etc-swift\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " 
pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.271252 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-run-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372422 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-run-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372484 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-combined-ca-bundle\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372507 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-internal-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372553 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-config-data\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372573 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnj64\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-kube-api-access-xnj64\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372605 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-public-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372655 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-log-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372729 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-etc-swift\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 
21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.372862 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-run-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.373762 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20bcf0b1-ac41-4641-8287-cd62c7ab1157-log-httpd\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.379966 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-internal-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.380223 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-public-tls-certs\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.381226 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-combined-ca-bundle\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.382121 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20bcf0b1-ac41-4641-8287-cd62c7ab1157-config-data\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.387454 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-etc-swift\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.395783 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnj64\" (UniqueName: \"kubernetes.io/projected/20bcf0b1-ac41-4641-8287-cd62c7ab1157-kube-api-access-xnj64\") pod \"swift-proxy-589fbdcc4f-69fll\" (UID: \"20bcf0b1-ac41-4641-8287-cd62c7ab1157\") " pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.580447 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:11 crc kubenswrapper[4912]: I1208 21:42:11.675370 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:42:12 crc kubenswrapper[4912]: I1208 21:42:12.299254 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 08 21:42:12 crc kubenswrapper[4912]: I1208 21:42:12.403829 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-589fbdcc4f-69fll"] Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.453268 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-589fbdcc4f-69fll" event={"ID":"20bcf0b1-ac41-4641-8287-cd62c7ab1157","Type":"ContainerStarted","Data":"fc031cdc2414d6a5a2697888b3e674fe4232895a76c3c6329698bd958596b8fd"} Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.454788 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-589fbdcc4f-69fll" event={"ID":"20bcf0b1-ac41-4641-8287-cd62c7ab1157","Type":"ContainerStarted","Data":"0edf068f97cd0404c53fb2249689303f2c5a83e6de85443d8a987ebbba93ef83"} Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.454871 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-589fbdcc4f-69fll" event={"ID":"20bcf0b1-ac41-4641-8287-cd62c7ab1157","Type":"ContainerStarted","Data":"df80f930fd3bc9d19f7f69271e1d04da30e54ccae469aaa154dd9e21a792a885"} Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.454953 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.455245 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:13 crc kubenswrapper[4912]: I1208 21:42:13.481944 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-589fbdcc4f-69fll" podStartSLOduration=2.481922499 podStartE2EDuration="2.481922499s" podCreationTimestamp="2025-12-08 21:42:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:13.478237715 +0000 UTC m=+1415.341239808" watchObservedRunningTime="2025-12-08 21:42:13.481922499 +0000 UTC m=+1415.344924582" Dec 08 21:42:14 crc kubenswrapper[4912]: I1208 21:42:14.194169 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 08 21:42:14 crc kubenswrapper[4912]: I1208 21:42:14.194417 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-log" containerID="cri-o://4e04b31cf9796cf0e96c2c4f2db773b81af3dc958ecd94485556a6f4b88dbd0c" gracePeriod=30 Dec 08 21:42:14 crc kubenswrapper[4912]: I1208 21:42:14.194518 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-httpd" containerID="cri-o://51d517c990744464e25749722ed2c757dfeef6fe3ac767932698e077889a82c9" gracePeriod=30 Dec 08 21:42:14 crc kubenswrapper[4912]: I1208 21:42:14.470452 4912 generic.go:334] "Generic (PLEG): container finished" podID="dfea36e4-18b5-4e25-a169-f91e8058ee69" 
containerID="4e04b31cf9796cf0e96c2c4f2db773b81af3dc958ecd94485556a6f4b88dbd0c" exitCode=143 Dec 08 21:42:14 crc kubenswrapper[4912]: I1208 21:42:14.472136 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerDied","Data":"4e04b31cf9796cf0e96c2c4f2db773b81af3dc958ecd94485556a6f4b88dbd0c"} Dec 08 21:42:15 crc kubenswrapper[4912]: I1208 21:42:15.296625 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:15 crc kubenswrapper[4912]: I1208 21:42:15.296889 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-log" containerID="cri-o://0a36acd2d6a74c1622042414d9487cf2bcb46fab5b6db7fdad9cf6670fea0fd0" gracePeriod=30 Dec 08 21:42:15 crc kubenswrapper[4912]: I1208 21:42:15.296988 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-httpd" containerID="cri-o://598783c498f4bc2640e24c42329e86797a5374c25e552dcf78371d0a1d067a6e" gracePeriod=30 Dec 08 21:42:15 crc kubenswrapper[4912]: I1208 21:42:15.483410 4912 generic.go:334] "Generic (PLEG): container finished" podID="931060bd-5dcf-4163-9da8-aed7374af480" containerID="0a36acd2d6a74c1622042414d9487cf2bcb46fab5b6db7fdad9cf6670fea0fd0" exitCode=143 Dec 08 21:42:15 crc kubenswrapper[4912]: I1208 21:42:15.483547 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerDied","Data":"0a36acd2d6a74c1622042414d9487cf2bcb46fab5b6db7fdad9cf6670fea0fd0"} Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.510442 4912 generic.go:334] "Generic (PLEG): container finished" podID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerID="51d517c990744464e25749722ed2c757dfeef6fe3ac767932698e077889a82c9" exitCode=0 Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.510511 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerDied","Data":"51d517c990744464e25749722ed2c757dfeef6fe3ac767932698e077889a82c9"} Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.822229 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gtnp8"] Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.823691 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.836877 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtnp8"] Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.925581 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwp55\" (UniqueName: \"kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.925992 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.944920 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-72qk9"] Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.946517 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:17 crc kubenswrapper[4912]: I1208 21:42:17.955393 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-72qk9"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.028059 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.028241 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwp55\" (UniqueName: \"kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.029570 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.029632 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-8xbcq"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.032109 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.053062 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8xbcq"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.070324 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwp55\" (UniqueName: \"kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55\") pod \"nova-api-db-create-gtnp8\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") " pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.081255 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-dc7c-account-create-update-x4cjw"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.097893 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.107332 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.117385 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-dc7c-account-create-update-x4cjw"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.129663 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.129862 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9nms\" (UniqueName: \"kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.144764 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtnp8" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.228461 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d03c-account-create-update-5xf7p"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.229918 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231236 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231284 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcxb7\" (UniqueName: \"kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231341 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq5f5\" (UniqueName: \"kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231383 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231415 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.231502 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9nms\" (UniqueName: \"kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.232710 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.232997 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.243259 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d03c-account-create-update-5xf7p"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.262994 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9nms\" (UniqueName: 
\"kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms\") pod \"nova-cell0-db-create-72qk9\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") " pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.271002 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-72qk9" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.287807 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-66784bb489-2dwmz" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337403 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcxb7\" (UniqueName: \"kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337457 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337495 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq5f5\" (UniqueName: \"kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337529 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337554 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.337646 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9dlv\" (UniqueName: \"kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.339543 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.340013 4912 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.353878 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-29a0-account-create-update-8ftmm"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.355551 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.362582 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-29a0-account-create-update-8ftmm"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.371707 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcxb7\" (UniqueName: \"kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7\") pod \"nova-api-dc7c-account-create-update-x4cjw\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") " pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.385239 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.385977 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq5f5\" (UniqueName: \"kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5\") pod \"nova-cell1-db-create-8xbcq\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") " pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.407523 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.407979 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-688d6dd778-96vcb" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-api" containerID="cri-o://46bdf0e0c8682b0558c86c0c24c8efa3c607dd557b34f4c6db9a514f63f4529a" gracePeriod=30 Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.408750 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-688d6dd778-96vcb" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-httpd" containerID="cri-o://8e8d05a39de6569a5bad68e352ba97aa7e0d09d3dbb6c332f0e86f04b68c88d0" gracePeriod=30 Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.440969 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9dlv\" (UniqueName: \"kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.441154 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " 
pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.442772 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8xbcq" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.443447 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.461065 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9dlv\" (UniqueName: \"kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv\") pod \"nova-cell0-d03c-account-create-update-5xf7p\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") " pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.461527 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dc7c-account-create-update-x4cjw" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.548681 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.548801 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pv94\" (UniqueName: \"kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.556244 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.652968 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.653507 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pv94\" (UniqueName: \"kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.655019 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.679711 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pv94\" (UniqueName: \"kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94\") pod \"nova-cell1-29a0-account-create-update-8ftmm\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") " pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:18 crc kubenswrapper[4912]: I1208 21:42:18.893543 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" Dec 08 21:42:19 crc kubenswrapper[4912]: I1208 21:42:19.539376 4912 generic.go:334] "Generic (PLEG): container finished" podID="931060bd-5dcf-4163-9da8-aed7374af480" containerID="598783c498f4bc2640e24c42329e86797a5374c25e552dcf78371d0a1d067a6e" exitCode=0 Dec 08 21:42:19 crc kubenswrapper[4912]: I1208 21:42:19.539675 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerDied","Data":"598783c498f4bc2640e24c42329e86797a5374c25e552dcf78371d0a1d067a6e"} Dec 08 21:42:19 crc kubenswrapper[4912]: I1208 21:42:19.543918 4912 generic.go:334] "Generic (PLEG): container finished" podID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerID="8e8d05a39de6569a5bad68e352ba97aa7e0d09d3dbb6c332f0e86f04b68c88d0" exitCode=0 Dec 08 21:42:19 crc kubenswrapper[4912]: I1208 21:42:19.543964 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerDied","Data":"8e8d05a39de6569a5bad68e352ba97aa7e0d09d3dbb6c332f0e86f04b68c88d0"} Dec 08 21:42:20 crc kubenswrapper[4912]: I1208 21:42:20.076844 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.148:9292/healthcheck\": dial tcp 10.217.0.148:9292: connect: connection refused" Dec 08 21:42:20 crc kubenswrapper[4912]: I1208 21:42:20.076844 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.148:9292/healthcheck\": dial tcp 10.217.0.148:9292: connect: connection refused" Dec 08 21:42:20 crc kubenswrapper[4912]: E1208 21:42:20.339600 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:42:21 crc kubenswrapper[4912]: I1208 21:42:21.028088 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": dial tcp 10.217.0.150:9292: connect: connection refused" Dec 08 21:42:21 crc kubenswrapper[4912]: I1208 21:42:21.028095 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": dial tcp 10.217.0.150:9292: connect: connection refused" Dec 08 21:42:21 crc kubenswrapper[4912]: I1208 21:42:21.586513 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:21 crc kubenswrapper[4912]: I1208 21:42:21.587124 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-589fbdcc4f-69fll" Dec 08 21:42:22 crc kubenswrapper[4912]: I1208 
21:42:22.573869 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b9f7656-3af5-4d88-a713-1dad51007309","Type":"ContainerStarted","Data":"7e4b54fb89fbf89e4bf590ce58b6e86f4f7065956d89b01bedb79b6ecfa78a1c"} Dec 08 21:42:22 crc kubenswrapper[4912]: I1208 21:42:22.602433 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.180655447 podStartE2EDuration="20.602411138s" podCreationTimestamp="2025-12-08 21:42:02 +0000 UTC" firstStartedPulling="2025-12-08 21:42:03.799328295 +0000 UTC m=+1405.662330378" lastFinishedPulling="2025-12-08 21:42:22.221083986 +0000 UTC m=+1424.084086069" observedRunningTime="2025-12-08 21:42:22.592845352 +0000 UTC m=+1424.455847445" watchObservedRunningTime="2025-12-08 21:42:22.602411138 +0000 UTC m=+1424.465413221" Dec 08 21:42:22 crc kubenswrapper[4912]: I1208 21:42:22.710763 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8xbcq"] Dec 08 21:42:22 crc kubenswrapper[4912]: I1208 21:42:22.894020 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:22 crc kubenswrapper[4912]: I1208 21:42:22.993859 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtnp8"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.024693 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-72qk9"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.055210 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.055248 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056077 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056153 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056248 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056332 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jgxg\" (UniqueName: 
\"kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056365 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056450 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056514 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs" (OuterVolumeSpecName: "logs") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056886 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.056995 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.066879 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg" (OuterVolumeSpecName: "kube-api-access-8jgxg") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "kube-api-access-8jgxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.078271 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts" (OuterVolumeSpecName: "scripts") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.078414 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d03c-account-create-update-5xf7p"] Dec 08 21:42:23 crc kubenswrapper[4912]: W1208 21:42:23.088398 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod780887b8_a649_4d74_b050_43fc706b23cb.slice/crio-4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4 WatchSource:0}: Error finding container 4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4: Status 404 returned error can't find the container with id 4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4 Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.088661 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (OuterVolumeSpecName: "glance") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "pvc-f73b914b-b926-44ad-a1ae-1553428892b0". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.119541 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-29a0-account-create-update-8ftmm"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.157812 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.158181 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") pod \"931060bd-5dcf-4163-9da8-aed7374af480\" (UID: \"931060bd-5dcf-4163-9da8-aed7374af480\") " Dec 08 21:42:23 crc kubenswrapper[4912]: W1208 21:42:23.158311 4912 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/931060bd-5dcf-4163-9da8-aed7374af480/volumes/kubernetes.io~secret/combined-ca-bundle Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.158323 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.160165 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.160188 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/931060bd-5dcf-4163-9da8-aed7374af480-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.160228 4912 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.160239 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jgxg\" (UniqueName: \"kubernetes.io/projected/931060bd-5dcf-4163-9da8-aed7374af480-kube-api-access-8jgxg\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.160248 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.195478 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-dc7c-account-create-update-x4cjw"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.208357 4912 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.208910 4912 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f73b914b-b926-44ad-a1ae-1553428892b0" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0") on node "crc" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.241589 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data" (OuterVolumeSpecName: "config-data") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.264225 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.265982 4912 reconciler_common.go:293] "Volume detached for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.288145 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "931060bd-5dcf-4163-9da8-aed7374af480" (UID: "931060bd-5dcf-4163-9da8-aed7374af480"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.302491 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.368522 4912 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/931060bd-5dcf-4163-9da8-aed7374af480-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469467 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469522 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469570 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bmb5\" (UniqueName: \"kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469759 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469826 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469856 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469973 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.469995 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run\") pod \"dfea36e4-18b5-4e25-a169-f91e8058ee69\" (UID: \"dfea36e4-18b5-4e25-a169-f91e8058ee69\") " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.470872 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run" 
(OuterVolumeSpecName: "httpd-run") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.471480 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs" (OuterVolumeSpecName: "logs") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.485010 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5" (OuterVolumeSpecName: "kube-api-access-7bmb5") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "kube-api-access-7bmb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.485483 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts" (OuterVolumeSpecName: "scripts") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.500801 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd" (OuterVolumeSpecName: "glance") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.535691 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572210 4912 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") on node \"crc\" " Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572243 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572252 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572263 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572271 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfea36e4-18b5-4e25-a169-f91e8058ee69-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.572279 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bmb5\" (UniqueName: \"kubernetes.io/projected/dfea36e4-18b5-4e25-a169-f91e8058ee69-kube-api-access-7bmb5\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.590719 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"931060bd-5dcf-4163-9da8-aed7374af480","Type":"ContainerDied","Data":"08b73f72942b6f6d81459a94414b4c8c57a3af4a7025cb6752a7af9be7e0f4ec"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.590801 4912 scope.go:117] "RemoveContainer" containerID="598783c498f4bc2640e24c42329e86797a5374c25e552dcf78371d0a1d067a6e" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.590998 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.598253 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtnp8" event={"ID":"a2ed86f7-7769-4ccd-af95-9377719f1856","Type":"ContainerStarted","Data":"f6f03e13b36854441d66c7286d3569ff3885f677d2d27d7280bdf141c1f4ba81"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.598301 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtnp8" event={"ID":"a2ed86f7-7769-4ccd-af95-9377719f1856","Type":"ContainerStarted","Data":"39a2e6cdd9aac2248043d6d7f46a7b30d84e9e001f8d9665652b97f26c3dceb3"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.603179 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8xbcq" event={"ID":"d876181b-ee71-4961-a40b-4bf1f634bc59","Type":"ContainerStarted","Data":"df01e32bea4de2c9712b84e9ebb4506beea520d5b7e0576c690735e4b56765ae"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.603219 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8xbcq" event={"ID":"d876181b-ee71-4961-a40b-4bf1f634bc59","Type":"ContainerStarted","Data":"1623be047287bf09679821317a61da10ef5699cbaeab396725ad4fc06c90d782"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.606379 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-72qk9" event={"ID":"5b451d56-df8d-43b1-bcc0-18aeba033358","Type":"ContainerStarted","Data":"1202e5557ce1ffbef4095b2155cf352a4e2a9bf1f34d8772bf748c6f438f922c"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.606414 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-72qk9" event={"ID":"5b451d56-df8d-43b1-bcc0-18aeba033358","Type":"ContainerStarted","Data":"153b67a45e8661d346724b5ee7729eae6656ff7611e25ec14f19fb2aeafb3c72"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.610748 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" event={"ID":"780887b8-a649-4d74-b050-43fc706b23cb","Type":"ContainerStarted","Data":"a3f6bd14d174271a07233dcc9def8c8f178831cb67e476547552e2296bc20ceb"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.610809 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" event={"ID":"780887b8-a649-4d74-b050-43fc706b23cb","Type":"ContainerStarted","Data":"4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.623794 4912 scope.go:117] "RemoveContainer" containerID="0a36acd2d6a74c1622042414d9487cf2bcb46fab5b6db7fdad9cf6670fea0fd0" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.624800 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dfea36e4-18b5-4e25-a169-f91e8058ee69","Type":"ContainerDied","Data":"c2bdfe2ea8fec08547a925cb4d6fd017b8598823c2d7d430be6d6ea6f3e6b37e"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.624878 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.633729 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" event={"ID":"9c46d9ff-9bd3-434b-9917-db43f38a2320","Type":"ContainerStarted","Data":"2ddbbcdd054d720243e8d531a9e0ec3778be9c4d3380d8c8f3da31951f67fff3"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.635334 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dc7c-account-create-update-x4cjw" event={"ID":"f5416848-8b0d-4e46-bdba-8286d1ba9c2c","Type":"ContainerStarted","Data":"968bd37be0d38955bb919ed80416c88200163687c89e93ecd338b8dba7787c51"} Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.648766 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-gtnp8" podStartSLOduration=6.648742987 podStartE2EDuration="6.648742987s" podCreationTimestamp="2025-12-08 21:42:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.618987693 +0000 UTC m=+1425.481989776" watchObservedRunningTime="2025-12-08 21:42:23.648742987 +0000 UTC m=+1425.511745070" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.651336 4912 scope.go:117] "RemoveContainer" containerID="51d517c990744464e25749722ed2c757dfeef6fe3ac767932698e077889a82c9" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.657384 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-72qk9" podStartSLOduration=6.657364729 podStartE2EDuration="6.657364729s" podCreationTimestamp="2025-12-08 21:42:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.639942411 +0000 UTC m=+1425.502944504" watchObservedRunningTime="2025-12-08 21:42:23.657364729 +0000 UTC m=+1425.520366812" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.673189 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" podStartSLOduration=5.673145454 podStartE2EDuration="5.673145454s" podCreationTimestamp="2025-12-08 21:42:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.65821318 +0000 UTC m=+1425.521215283" watchObservedRunningTime="2025-12-08 21:42:23.673145454 +0000 UTC m=+1425.536147537" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.684139 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-8xbcq" podStartSLOduration=6.683951431 podStartE2EDuration="6.683951431s" podCreationTimestamp="2025-12-08 21:42:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:23.673932434 +0000 UTC m=+1425.536934527" watchObservedRunningTime="2025-12-08 21:42:23.683951431 +0000 UTC m=+1425.546953514" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.694579 4912 scope.go:117] "RemoveContainer" containerID="4e04b31cf9796cf0e96c2c4f2db773b81af3dc958ecd94485556a6f4b88dbd0c" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.701225 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.734888 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.745680 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.759854 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data" (OuterVolumeSpecName: "config-data") pod "dfea36e4-18b5-4e25-a169-f91e8058ee69" (UID: "dfea36e4-18b5-4e25-a169-f91e8058ee69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.780281 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.780318 4912 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfea36e4-18b5-4e25-a169-f91e8058ee69-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.789002 4912 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.789570 4912 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd") on node "crc"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.795384 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:42:23 crc kubenswrapper[4912]: E1208 21:42:23.795868 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.795885 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: E1208 21:42:23.795911 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.795920 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: E1208 21:42:23.795993 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.796003 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: E1208 21:42:23.796016 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.796023 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.798593 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.798623 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.798635 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-log"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.798644 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="931060bd-5dcf-4163-9da8-aed7374af480" containerName="glance-httpd"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.800580 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.808259 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.808477 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.823127 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.883598 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-logs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.883649 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.883804 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.883888 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.883935 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.884068 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-989cx\" (UniqueName: \"kubernetes.io/projected/994c9f45-076f-4a96-a8a7-b9e15b90893a-kube-api-access-989cx\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.884238 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-scripts\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.884622 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-config-data\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.884789 4912 reconciler_common.go:293] "Volume detached for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986429 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-logs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986483 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986520 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986546 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986570 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986600 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-989cx\" (UniqueName: \"kubernetes.io/projected/994c9f45-076f-4a96-a8a7-b9e15b90893a-kube-api-access-989cx\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986661 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-scripts\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.986723 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-config-data\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.987024 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.987470 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994c9f45-076f-4a96-a8a7-b9e15b90893a-logs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.992052 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.992310 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.992815 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-config-data\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.992903 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994c9f45-076f-4a96-a8a7-b9e15b90893a-scripts\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.992990 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:42:23 crc kubenswrapper[4912]: I1208 21:42:23.993046 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f04dae7735d5049d0d88a291850a74e98319e9b887dd9c8e9e7d7d4d3762e2c6/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.006581 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-989cx\" (UniqueName: \"kubernetes.io/projected/994c9f45-076f-4a96-a8a7-b9e15b90893a-kube-api-access-989cx\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.055489 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f73b914b-b926-44ad-a1ae-1553428892b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f73b914b-b926-44ad-a1ae-1553428892b0\") pod \"glance-default-external-api-0\" (UID: \"994c9f45-076f-4a96-a8a7-b9e15b90893a\") " pod="openstack/glance-default-external-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.307164 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.331727 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.354334 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.389776 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.391423 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.393485 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.396065 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.405048 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.451097 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="931060bd-5dcf-4163-9da8-aed7374af480" path="/var/lib/kubelet/pods/931060bd-5dcf-4163-9da8-aed7374af480/volumes"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.455699 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfea36e4-18b5-4e25-a169-f91e8058ee69" path="/var/lib/kubelet/pods/dfea36e4-18b5-4e25-a169-f91e8058ee69/volumes"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.501870 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwrjc\" (UniqueName: \"kubernetes.io/projected/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-kube-api-access-gwrjc\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502216 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502284 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502301 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502399 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502437 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502504 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.502524 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606470 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606517 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606599 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606640 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606703 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606884 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwrjc\" (UniqueName: \"kubernetes.io/projected/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-kube-api-access-gwrjc\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.606931 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.609111 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.609616 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.614682 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.615560 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.616705 4912 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.616765 4912 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f170b0bc0d5e657f5d6976432df9f5b93559f3f6e739297a830b8d1908e7cee/globalmount\"" pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.616713 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.617802 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.629782 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwrjc\" (UniqueName: \"kubernetes.io/projected/3ecb7bb8-b114-4de3-ba10-ea9537d3daa4-kube-api-access-gwrjc\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.654262 4912 generic.go:334] "Generic (PLEG): container finished" podID="a2ed86f7-7769-4ccd-af95-9377719f1856" containerID="f6f03e13b36854441d66c7286d3569ff3885f677d2d27d7280bdf141c1f4ba81" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.654348 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtnp8" event={"ID":"a2ed86f7-7769-4ccd-af95-9377719f1856","Type":"ContainerDied","Data":"f6f03e13b36854441d66c7286d3569ff3885f677d2d27d7280bdf141c1f4ba81"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.657380 4912 generic.go:334] "Generic (PLEG): container finished" podID="d876181b-ee71-4961-a40b-4bf1f634bc59" containerID="df01e32bea4de2c9712b84e9ebb4506beea520d5b7e0576c690735e4b56765ae" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.657460 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8xbcq" event={"ID":"d876181b-ee71-4961-a40b-4bf1f634bc59","Type":"ContainerDied","Data":"df01e32bea4de2c9712b84e9ebb4506beea520d5b7e0576c690735e4b56765ae"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.660859 4912 generic.go:334] "Generic (PLEG): container finished" podID="5b451d56-df8d-43b1-bcc0-18aeba033358" containerID="1202e5557ce1ffbef4095b2155cf352a4e2a9bf1f34d8772bf748c6f438f922c" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.660917 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-72qk9" event={"ID":"5b451d56-df8d-43b1-bcc0-18aeba033358","Type":"ContainerDied","Data":"1202e5557ce1ffbef4095b2155cf352a4e2a9bf1f34d8772bf748c6f438f922c"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.662861 4912 generic.go:334] "Generic (PLEG): container finished" podID="780887b8-a649-4d74-b050-43fc706b23cb" containerID="a3f6bd14d174271a07233dcc9def8c8f178831cb67e476547552e2296bc20ceb" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.662974 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" event={"ID":"780887b8-a649-4d74-b050-43fc706b23cb","Type":"ContainerDied","Data":"a3f6bd14d174271a07233dcc9def8c8f178831cb67e476547552e2296bc20ceb"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.673259 4912 generic.go:334] "Generic (PLEG): container finished" podID="9c46d9ff-9bd3-434b-9917-db43f38a2320" containerID="14571e613324d72cc87a6dabf81784b547cd9c94668a59c5c2b880babc610b8f" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.673624 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" event={"ID":"9c46d9ff-9bd3-434b-9917-db43f38a2320","Type":"ContainerDied","Data":"14571e613324d72cc87a6dabf81784b547cd9c94668a59c5c2b880babc610b8f"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.673871 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c02fe364-29b9-4e60-8c58-ab5fdb227efd\") pod \"glance-default-internal-api-0\" (UID: \"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4\") " pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.677736 4912 generic.go:334] "Generic (PLEG): container finished" podID="f5416848-8b0d-4e46-bdba-8286d1ba9c2c" containerID="7b8720abc03f91aae958786894395274ea4e82dd352a8aeb6ca46a8b262dcc98" exitCode=0
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.677783 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dc7c-account-create-update-x4cjw" event={"ID":"f5416848-8b0d-4e46-bdba-8286d1ba9c2c","Type":"ContainerDied","Data":"7b8720abc03f91aae958786894395274ea4e82dd352a8aeb6ca46a8b262dcc98"}
Dec 08 21:42:24 crc kubenswrapper[4912]: I1208 21:42:24.754586 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 08 21:42:25 crc kubenswrapper[4912]: I1208 21:42:25.040187 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 08 21:42:25 crc kubenswrapper[4912]: I1208 21:42:25.372808 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 08 21:42:25 crc kubenswrapper[4912]: W1208 21:42:25.387856 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ecb7bb8_b114_4de3_ba10_ea9537d3daa4.slice/crio-ada17336e9aac5b401e13d0080aee8d6870a19404f4d608926626d1aca66ee28 WatchSource:0}: Error finding container ada17336e9aac5b401e13d0080aee8d6870a19404f4d608926626d1aca66ee28: Status 404 returned error can't find the container with id ada17336e9aac5b401e13d0080aee8d6870a19404f4d608926626d1aca66ee28
Dec 08 21:42:25 crc kubenswrapper[4912]: I1208 21:42:25.709773 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"994c9f45-076f-4a96-a8a7-b9e15b90893a","Type":"ContainerStarted","Data":"37fc85ca432997770c3369e72855d3795a3c7b047012babb94c6927f7dad98d8"}
Dec 08 21:42:25 crc kubenswrapper[4912]: I1208 21:42:25.712701 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4","Type":"ContainerStarted","Data":"ada17336e9aac5b401e13d0080aee8d6870a19404f4d608926626d1aca66ee28"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.215199 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dc7c-account-create-update-x4cjw"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.249858 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcxb7\" (UniqueName: \"kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7\") pod \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.250009 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts\") pod \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\" (UID: \"f5416848-8b0d-4e46-bdba-8286d1ba9c2c\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.251288 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5416848-8b0d-4e46-bdba-8286d1ba9c2c" (UID: "f5416848-8b0d-4e46-bdba-8286d1ba9c2c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.258707 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7" (OuterVolumeSpecName: "kube-api-access-zcxb7") pod "f5416848-8b0d-4e46-bdba-8286d1ba9c2c" (UID: "f5416848-8b0d-4e46-bdba-8286d1ba9c2c"). InnerVolumeSpecName "kube-api-access-zcxb7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.353460 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcxb7\" (UniqueName: \"kubernetes.io/projected/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-kube-api-access-zcxb7\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.353504 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5416848-8b0d-4e46-bdba-8286d1ba9c2c-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.401546 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-72qk9"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.407512 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.456219 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9nms\" (UniqueName: \"kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms\") pod \"5b451d56-df8d-43b1-bcc0-18aeba033358\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.456282 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts\") pod \"5b451d56-df8d-43b1-bcc0-18aeba033358\" (UID: \"5b451d56-df8d-43b1-bcc0-18aeba033358\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.456380 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9dlv\" (UniqueName: \"kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv\") pod \"780887b8-a649-4d74-b050-43fc706b23cb\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.456512 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts\") pod \"780887b8-a649-4d74-b050-43fc706b23cb\" (UID: \"780887b8-a649-4d74-b050-43fc706b23cb\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.460384 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "780887b8-a649-4d74-b050-43fc706b23cb" (UID: "780887b8-a649-4d74-b050-43fc706b23cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.469058 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5b451d56-df8d-43b1-bcc0-18aeba033358" (UID: "5b451d56-df8d-43b1-bcc0-18aeba033358"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.473234 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv" (OuterVolumeSpecName: "kube-api-access-z9dlv") pod "780887b8-a649-4d74-b050-43fc706b23cb" (UID: "780887b8-a649-4d74-b050-43fc706b23cb"). InnerVolumeSpecName "kube-api-access-z9dlv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.473728 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms" (OuterVolumeSpecName: "kube-api-access-l9nms") pod "5b451d56-df8d-43b1-bcc0-18aeba033358" (UID: "5b451d56-df8d-43b1-bcc0-18aeba033358"). InnerVolumeSpecName "kube-api-access-l9nms". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.488817 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtnp8"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.519486 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8xbcq"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.525660 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.557955 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts\") pod \"d876181b-ee71-4961-a40b-4bf1f634bc59\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.558080 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts\") pod \"9c46d9ff-9bd3-434b-9917-db43f38a2320\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.558216 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hq5f5\" (UniqueName: \"kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5\") pod \"d876181b-ee71-4961-a40b-4bf1f634bc59\" (UID: \"d876181b-ee71-4961-a40b-4bf1f634bc59\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.558281 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts\") pod \"a2ed86f7-7769-4ccd-af95-9377719f1856\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.558463 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pv94\" (UniqueName: \"kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94\") pod \"9c46d9ff-9bd3-434b-9917-db43f38a2320\" (UID: \"9c46d9ff-9bd3-434b-9917-db43f38a2320\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.558537 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwp55\" (UniqueName: \"kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55\") pod \"a2ed86f7-7769-4ccd-af95-9377719f1856\" (UID: \"a2ed86f7-7769-4ccd-af95-9377719f1856\") "
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.559207 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2ed86f7-7769-4ccd-af95-9377719f1856" (UID: "a2ed86f7-7769-4ccd-af95-9377719f1856"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.559822 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c46d9ff-9bd3-434b-9917-db43f38a2320" (UID: "9c46d9ff-9bd3-434b-9917-db43f38a2320"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560183 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/780887b8-a649-4d74-b050-43fc706b23cb-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560214 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9nms\" (UniqueName: \"kubernetes.io/projected/5b451d56-df8d-43b1-bcc0-18aeba033358-kube-api-access-l9nms\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560230 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b451d56-df8d-43b1-bcc0-18aeba033358-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560242 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c46d9ff-9bd3-434b-9917-db43f38a2320-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560253 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9dlv\" (UniqueName: \"kubernetes.io/projected/780887b8-a649-4d74-b050-43fc706b23cb-kube-api-access-z9dlv\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.560265 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ed86f7-7769-4ccd-af95-9377719f1856-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.562117 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d876181b-ee71-4961-a40b-4bf1f634bc59" (UID: "d876181b-ee71-4961-a40b-4bf1f634bc59"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.570312 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5" (OuterVolumeSpecName: "kube-api-access-hq5f5") pod "d876181b-ee71-4961-a40b-4bf1f634bc59" (UID: "d876181b-ee71-4961-a40b-4bf1f634bc59"). InnerVolumeSpecName "kube-api-access-hq5f5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.571217 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55" (OuterVolumeSpecName: "kube-api-access-dwp55") pod "a2ed86f7-7769-4ccd-af95-9377719f1856" (UID: "a2ed86f7-7769-4ccd-af95-9377719f1856"). InnerVolumeSpecName "kube-api-access-dwp55". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.574266 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94" (OuterVolumeSpecName: "kube-api-access-8pv94") pod "9c46d9ff-9bd3-434b-9917-db43f38a2320" (UID: "9c46d9ff-9bd3-434b-9917-db43f38a2320"). InnerVolumeSpecName "kube-api-access-8pv94". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.663170 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hq5f5\" (UniqueName: \"kubernetes.io/projected/d876181b-ee71-4961-a40b-4bf1f634bc59-kube-api-access-hq5f5\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.663237 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pv94\" (UniqueName: \"kubernetes.io/projected/9c46d9ff-9bd3-434b-9917-db43f38a2320-kube-api-access-8pv94\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.663255 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwp55\" (UniqueName: \"kubernetes.io/projected/a2ed86f7-7769-4ccd-af95-9377719f1856-kube-api-access-dwp55\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.663270 4912 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d876181b-ee71-4961-a40b-4bf1f634bc59-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.727921 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8xbcq"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.728859 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8xbcq" event={"ID":"d876181b-ee71-4961-a40b-4bf1f634bc59","Type":"ContainerDied","Data":"1623be047287bf09679821317a61da10ef5699cbaeab396725ad4fc06c90d782"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.728913 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1623be047287bf09679821317a61da10ef5699cbaeab396725ad4fc06c90d782"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.734987 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-72qk9"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.735174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-72qk9" event={"ID":"5b451d56-df8d-43b1-bcc0-18aeba033358","Type":"ContainerDied","Data":"153b67a45e8661d346724b5ee7729eae6656ff7611e25ec14f19fb2aeafb3c72"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.735198 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="153b67a45e8661d346724b5ee7729eae6656ff7611e25ec14f19fb2aeafb3c72"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.739299 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p" event={"ID":"780887b8-a649-4d74-b050-43fc706b23cb","Type":"ContainerDied","Data":"4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.739322 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f01cb247bc7b5f4c9f12cf4d8f7b25f769d062acc6e16e4ce5c877321bb3af4"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.739368 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d03c-account-create-update-5xf7p"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.743509 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm" event={"ID":"9c46d9ff-9bd3-434b-9917-db43f38a2320","Type":"ContainerDied","Data":"2ddbbcdd054d720243e8d531a9e0ec3778be9c4d3380d8c8f3da31951f67fff3"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.743545 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ddbbcdd054d720243e8d531a9e0ec3778be9c4d3380d8c8f3da31951f67fff3"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.743525 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-29a0-account-create-update-8ftmm"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.745618 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dc7c-account-create-update-x4cjw" event={"ID":"f5416848-8b0d-4e46-bdba-8286d1ba9c2c","Type":"ContainerDied","Data":"968bd37be0d38955bb919ed80416c88200163687c89e93ecd338b8dba7787c51"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.745636 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="968bd37be0d38955bb919ed80416c88200163687c89e93ecd338b8dba7787c51"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.745684 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dc7c-account-create-update-x4cjw"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.750340 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4","Type":"ContainerStarted","Data":"c2aa3cfaafd9be115922486d104d17f537e477cda4ea53073078ee6e389fa669"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.751741 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"994c9f45-076f-4a96-a8a7-b9e15b90893a","Type":"ContainerStarted","Data":"09198ce8de58c8590a874b5eba6c99f81495e6c82782f18d5cdc0ae9af1ec584"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.751760 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"994c9f45-076f-4a96-a8a7-b9e15b90893a","Type":"ContainerStarted","Data":"c538f5225017a6e863baaa4913041e129ff0e52158a8a477015520569c8bfdf0"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.753505 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtnp8" event={"ID":"a2ed86f7-7769-4ccd-af95-9377719f1856","Type":"ContainerDied","Data":"39a2e6cdd9aac2248043d6d7f46a7b30d84e9e001f8d9665652b97f26c3dceb3"}
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.753526 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39a2e6cdd9aac2248043d6d7f46a7b30d84e9e001f8d9665652b97f26c3dceb3"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.753569 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtnp8"
Dec 08 21:42:26 crc kubenswrapper[4912]: I1208 21:42:26.797417 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.797391803 podStartE2EDuration="3.797391803s" podCreationTimestamp="2025-12-08 21:42:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:26.778392535 +0000 UTC m=+1428.641394618" watchObservedRunningTime="2025-12-08 21:42:26.797391803 +0000 UTC m=+1428.660393886"
Dec 08 21:42:27 crc kubenswrapper[4912]: I1208 21:42:27.764010 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ecb7bb8-b114-4de3-ba10-ea9537d3daa4","Type":"ContainerStarted","Data":"32229923130fc8a09ea15bc25c6cea3344a1153cc353418b881a882f4088bd1d"}
Dec 08 21:42:27 crc kubenswrapper[4912]: I1208 21:42:27.790507 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.790482215 podStartE2EDuration="3.790482215s" podCreationTimestamp="2025-12-08 21:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:42:27.784348077 +0000 UTC m=+1429.647350160" watchObservedRunningTime="2025-12-08 21:42:27.790482215 +0000 UTC m=+1429.653484308"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.649580 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lkdlz"]
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650230 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ed86f7-7769-4ccd-af95-9377719f1856" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650247 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ed86f7-7769-4ccd-af95-9377719f1856" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650267 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780887b8-a649-4d74-b050-43fc706b23cb" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650274 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="780887b8-a649-4d74-b050-43fc706b23cb" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650284 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c46d9ff-9bd3-434b-9917-db43f38a2320" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650290 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c46d9ff-9bd3-434b-9917-db43f38a2320" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650308 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5416848-8b0d-4e46-bdba-8286d1ba9c2c" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650313 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5416848-8b0d-4e46-bdba-8286d1ba9c2c" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650327 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d876181b-ee71-4961-a40b-4bf1f634bc59" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650333 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d876181b-ee71-4961-a40b-4bf1f634bc59" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: E1208 21:42:28.650344 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b451d56-df8d-43b1-bcc0-18aeba033358" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650350 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b451d56-df8d-43b1-bcc0-18aeba033358" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650520 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ed86f7-7769-4ccd-af95-9377719f1856" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650532 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="780887b8-a649-4d74-b050-43fc706b23cb" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650543 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5416848-8b0d-4e46-bdba-8286d1ba9c2c" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650554 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="d876181b-ee71-4961-a40b-4bf1f634bc59" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650566 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b451d56-df8d-43b1-bcc0-18aeba033358" containerName="mariadb-database-create"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.650576 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c46d9ff-9bd3-434b-9917-db43f38a2320" containerName="mariadb-account-create-update"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.651650 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.653548 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jmq8c"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.653860 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.659839 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.665063 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lkdlz"]
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.720751 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jmgf\" (UniqueName: \"kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.720818 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.720840 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.720874 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.773963 4912 generic.go:334] "Generic (PLEG): container finished" podID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerID="46bdf0e0c8682b0558c86c0c24c8efa3c607dd557b34f4c6db9a514f63f4529a" exitCode=0
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.774874 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerDied","Data":"46bdf0e0c8682b0558c86c0c24c8efa3c607dd557b34f4c6db9a514f63f4529a"}
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.823293 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jmgf\" (UniqueName: \"kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.823357 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.823404 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.823447 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.837268 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.837725 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.839612 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.840192 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jmgf\" (UniqueName: \"kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf\") pod \"nova-cell0-conductor-db-sync-lkdlz\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:28 crc kubenswrapper[4912]: I1208 21:42:28.968338 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lkdlz"
Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.072384 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.127998 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle\") pod \"6a6ad257-bd45-428d-9010-7bc9e98cd463\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.128075 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config\") pod \"6a6ad257-bd45-428d-9010-7bc9e98cd463\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.128224 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config\") pod \"6a6ad257-bd45-428d-9010-7bc9e98cd463\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.128284 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn9fl\" (UniqueName: \"kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl\") pod \"6a6ad257-bd45-428d-9010-7bc9e98cd463\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.128310 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs\") pod \"6a6ad257-bd45-428d-9010-7bc9e98cd463\" (UID: \"6a6ad257-bd45-428d-9010-7bc9e98cd463\") " Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.134065 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6a6ad257-bd45-428d-9010-7bc9e98cd463" (UID: "6a6ad257-bd45-428d-9010-7bc9e98cd463"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.134313 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl" (OuterVolumeSpecName: "kube-api-access-nn9fl") pod "6a6ad257-bd45-428d-9010-7bc9e98cd463" (UID: "6a6ad257-bd45-428d-9010-7bc9e98cd463"). InnerVolumeSpecName "kube-api-access-nn9fl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.214174 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config" (OuterVolumeSpecName: "config") pod "6a6ad257-bd45-428d-9010-7bc9e98cd463" (UID: "6a6ad257-bd45-428d-9010-7bc9e98cd463"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.230504 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a6ad257-bd45-428d-9010-7bc9e98cd463" (UID: "6a6ad257-bd45-428d-9010-7bc9e98cd463"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.231626 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.231659 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.231669 4912 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.231678 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn9fl\" (UniqueName: \"kubernetes.io/projected/6a6ad257-bd45-428d-9010-7bc9e98cd463-kube-api-access-nn9fl\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.251897 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lkdlz"] Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.256261 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6a6ad257-bd45-428d-9010-7bc9e98cd463" (UID: "6a6ad257-bd45-428d-9010-7bc9e98cd463"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.333061 4912 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6ad257-bd45-428d-9010-7bc9e98cd463-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.783915 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688d6dd778-96vcb" event={"ID":"6a6ad257-bd45-428d-9010-7bc9e98cd463","Type":"ContainerDied","Data":"454abebeaab9df0133819e21c2c78fc103362065cef501ef02df6c79d4120ed6"} Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.783957 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-688d6dd778-96vcb" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.784231 4912 scope.go:117] "RemoveContainer" containerID="8e8d05a39de6569a5bad68e352ba97aa7e0d09d3dbb6c332f0e86f04b68c88d0" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.785627 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" event={"ID":"8fab798f-4b32-43d8-bb00-cb1557b45f4f","Type":"ContainerStarted","Data":"3e924512f8b1d469f62a929ef3104c1b886bcd16886744eebcbb12a8908bf5e2"} Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.822883 4912 scope.go:117] "RemoveContainer" containerID="46bdf0e0c8682b0558c86c0c24c8efa3c607dd557b34f4c6db9a514f63f4529a" Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.829920 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:42:29 crc kubenswrapper[4912]: I1208 21:42:29.837801 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-688d6dd778-96vcb"] Dec 08 21:42:30 crc kubenswrapper[4912]: I1208 21:42:30.445050 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" path="/var/lib/kubelet/pods/6a6ad257-bd45-428d-9010-7bc9e98cd463/volumes" Dec 08 21:42:30 crc kubenswrapper[4912]: E1208 21:42:30.612317 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4574c96_aa92_4621_92e2_d8ee041d94c8.slice/crio-conmon-34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60.scope\": RecentStats: unable to find data in memory cache]" Dec 08 21:42:32 crc kubenswrapper[4912]: I1208 21:42:32.965768 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:42:32 crc kubenswrapper[4912]: I1208 21:42:32.966102 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.308121 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.308220 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.349367 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.379798 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.755106 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.755414 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/glance-default-internal-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.854786 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.854817 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.900491 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.901289 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:34 crc kubenswrapper[4912]: I1208 21:42:34.913275 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:35 crc kubenswrapper[4912]: I1208 21:42:35.863974 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:36 crc kubenswrapper[4912]: I1208 21:42:36.875249 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:42:36 crc kubenswrapper[4912]: I1208 21:42:36.875587 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:42:36 crc kubenswrapper[4912]: I1208 21:42:36.875961 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:42:37 crc kubenswrapper[4912]: I1208 21:42:37.120747 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:42:37 crc kubenswrapper[4912]: I1208 21:42:37.249223 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 08 21:42:37 crc kubenswrapper[4912]: I1208 21:42:37.531870 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:37 crc kubenswrapper[4912]: I1208 21:42:37.885160 4912 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 21:42:39 crc kubenswrapper[4912]: I1208 21:42:39.032518 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 08 21:42:39 crc kubenswrapper[4912]: I1208 21:42:39.905239 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" event={"ID":"8fab798f-4b32-43d8-bb00-cb1557b45f4f","Type":"ContainerStarted","Data":"1e97ccf49707cce920e248d3ec4c378fef0738c73e5b7434ec4b4d8bef882017"} Dec 08 21:42:39 crc kubenswrapper[4912]: I1208 21:42:39.926417 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" podStartSLOduration=2.130549787 podStartE2EDuration="11.926395669s" podCreationTimestamp="2025-12-08 21:42:28 +0000 UTC" firstStartedPulling="2025-12-08 21:42:29.256234585 +0000 UTC m=+1431.119236668" lastFinishedPulling="2025-12-08 21:42:39.052080467 +0000 UTC m=+1440.915082550" observedRunningTime="2025-12-08 21:42:39.917514921 +0000 UTC m=+1441.780517004" watchObservedRunningTime="2025-12-08 21:42:39.926395669 +0000 UTC m=+1441.789397752" Dec 08 21:42:42 crc kubenswrapper[4912]: I1208 21:42:42.936726 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" 
containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" exitCode=1 Dec 08 21:42:42 crc kubenswrapper[4912]: I1208 21:42:42.936800 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f"} Dec 08 21:42:42 crc kubenswrapper[4912]: I1208 21:42:42.937317 4912 scope.go:117] "RemoveContainer" containerID="3156642001f58269f8a3ebad775c19bf127b72340dcaa7f55593e3f609abdf3b" Dec 08 21:42:42 crc kubenswrapper[4912]: I1208 21:42:42.938276 4912 scope.go:117] "RemoveContainer" containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" Dec 08 21:42:42 crc kubenswrapper[4912]: E1208 21:42:42.938725 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:42:45 crc kubenswrapper[4912]: I1208 21:42:45.248916 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:42:45 crc kubenswrapper[4912]: I1208 21:42:45.250122 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:42:45 crc kubenswrapper[4912]: I1208 21:42:45.251271 4912 scope.go:117] "RemoveContainer" containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" Dec 08 21:42:45 crc kubenswrapper[4912]: E1208 21:42:45.251571 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:42:45 crc kubenswrapper[4912]: I1208 21:42:45.962488 4912 scope.go:117] "RemoveContainer" containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" Dec 08 21:42:45 crc kubenswrapper[4912]: E1208 21:42:45.962804 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.231909 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:42:47 crc kubenswrapper[4912]: E1208 21:42:47.233651 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-api" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.233732 4912 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-api" Dec 08 21:42:47 crc kubenswrapper[4912]: E1208 21:42:47.233803 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-httpd" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.233861 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-httpd" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.234731 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-api" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.234841 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6ad257-bd45-428d-9010-7bc9e98cd463" containerName="neutron-httpd" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.236146 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.251280 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.312797 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cfmg\" (UniqueName: \"kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.312901 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.313197 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.415492 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.415596 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cfmg\" (UniqueName: \"kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.415652 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities\") pod \"redhat-operators-f2d7r\" (UID: 
\"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.416028 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.416109 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.443903 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cfmg\" (UniqueName: \"kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg\") pod \"redhat-operators-f2d7r\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:47 crc kubenswrapper[4912]: I1208 21:42:47.557562 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:42:48 crc kubenswrapper[4912]: I1208 21:42:48.079941 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:42:49 crc kubenswrapper[4912]: I1208 21:42:49.002859 4912 generic.go:334] "Generic (PLEG): container finished" podID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerID="0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe" exitCode=0 Dec 08 21:42:49 crc kubenswrapper[4912]: I1208 21:42:49.002978 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerDied","Data":"0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe"} Dec 08 21:42:49 crc kubenswrapper[4912]: I1208 21:42:49.003219 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerStarted","Data":"03607cc91d9cd43beb42328eef6be112a87c1ce2c1d172d44b44d862edca05d5"} Dec 08 21:42:50 crc kubenswrapper[4912]: I1208 21:42:50.013638 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerStarted","Data":"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b"} Dec 08 21:42:56 crc kubenswrapper[4912]: I1208 21:42:56.076466 4912 generic.go:334] "Generic (PLEG): container finished" podID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerID="b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b" exitCode=0 Dec 08 21:42:56 crc kubenswrapper[4912]: I1208 21:42:56.076575 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerDied","Data":"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b"} Dec 08 21:42:58 crc kubenswrapper[4912]: I1208 21:42:58.114515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerStarted","Data":"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89"} Dec 08 21:42:58 crc kubenswrapper[4912]: I1208 21:42:58.153378 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f2d7r" podStartSLOduration=3.239390219 podStartE2EDuration="11.153360856s" podCreationTimestamp="2025-12-08 21:42:47 +0000 UTC" firstStartedPulling="2025-12-08 21:42:49.005253168 +0000 UTC m=+1450.868255251" lastFinishedPulling="2025-12-08 21:42:56.919223815 +0000 UTC m=+1458.782225888" observedRunningTime="2025-12-08 21:42:58.1449293 +0000 UTC m=+1460.007931383" watchObservedRunningTime="2025-12-08 21:42:58.153360856 +0000 UTC m=+1460.016362939" Dec 08 21:42:59 crc kubenswrapper[4912]: I1208 21:42:59.428489 4912 scope.go:117] "RemoveContainer" containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" Dec 08 21:43:00 crc kubenswrapper[4912]: I1208 21:43:00.144739 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76"} Dec 08 21:43:00 crc kubenswrapper[4912]: I1208 21:43:00.145345 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:43:01 crc kubenswrapper[4912]: I1208 21:43:01.191323 4912 generic.go:334] "Generic (PLEG): container finished" podID="8fab798f-4b32-43d8-bb00-cb1557b45f4f" containerID="1e97ccf49707cce920e248d3ec4c378fef0738c73e5b7434ec4b4d8bef882017" exitCode=0 Dec 08 21:43:01 crc kubenswrapper[4912]: I1208 21:43:01.192692 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" event={"ID":"8fab798f-4b32-43d8-bb00-cb1557b45f4f","Type":"ContainerDied","Data":"1e97ccf49707cce920e248d3ec4c378fef0738c73e5b7434ec4b4d8bef882017"} Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.539794 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.714570 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts\") pod \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.714633 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle\") pod \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.714772 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jmgf\" (UniqueName: \"kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf\") pod \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.714860 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data\") pod \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\" (UID: \"8fab798f-4b32-43d8-bb00-cb1557b45f4f\") " Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.720707 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf" (OuterVolumeSpecName: "kube-api-access-8jmgf") pod "8fab798f-4b32-43d8-bb00-cb1557b45f4f" (UID: "8fab798f-4b32-43d8-bb00-cb1557b45f4f"). InnerVolumeSpecName "kube-api-access-8jmgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.721096 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts" (OuterVolumeSpecName: "scripts") pod "8fab798f-4b32-43d8-bb00-cb1557b45f4f" (UID: "8fab798f-4b32-43d8-bb00-cb1557b45f4f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.745711 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data" (OuterVolumeSpecName: "config-data") pod "8fab798f-4b32-43d8-bb00-cb1557b45f4f" (UID: "8fab798f-4b32-43d8-bb00-cb1557b45f4f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.753478 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fab798f-4b32-43d8-bb00-cb1557b45f4f" (UID: "8fab798f-4b32-43d8-bb00-cb1557b45f4f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.817234 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jmgf\" (UniqueName: \"kubernetes.io/projected/8fab798f-4b32-43d8-bb00-cb1557b45f4f-kube-api-access-8jmgf\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.817284 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.817294 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.817303 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fab798f-4b32-43d8-bb00-cb1557b45f4f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.965005 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.965156 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.965223 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.965953 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:43:02 crc kubenswrapper[4912]: I1208 21:43:02.966034 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1" gracePeriod=600 Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.211869 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" event={"ID":"8fab798f-4b32-43d8-bb00-cb1557b45f4f","Type":"ContainerDied","Data":"3e924512f8b1d469f62a929ef3104c1b886bcd16886744eebcbb12a8908bf5e2"} Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.212208 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e924512f8b1d469f62a929ef3104c1b886bcd16886744eebcbb12a8908bf5e2" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.211904 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lkdlz" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.214573 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1" exitCode=0 Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.214635 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1"} Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.214695 4912 scope.go:117] "RemoveContainer" containerID="2bbe83801f5e4f664de5ce4a79737a9126b08b32fb28e3b53cf865ffeb56f1e8" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.354658 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:43:03 crc kubenswrapper[4912]: E1208 21:43:03.355297 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fab798f-4b32-43d8-bb00-cb1557b45f4f" containerName="nova-cell0-conductor-db-sync" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.355322 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fab798f-4b32-43d8-bb00-cb1557b45f4f" containerName="nova-cell0-conductor-db-sync" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.355568 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fab798f-4b32-43d8-bb00-cb1557b45f4f" containerName="nova-cell0-conductor-db-sync" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.356393 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.365922 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.366199 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jmq8c" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.391166 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.530635 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.530713 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.531063 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbjrz\" (UniqueName: \"kubernetes.io/projected/2a84a18f-003f-48fb-b522-290957dbb5db-kube-api-access-cbjrz\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc 
kubenswrapper[4912]: I1208 21:43:03.633292 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbjrz\" (UniqueName: \"kubernetes.io/projected/2a84a18f-003f-48fb-b522-290957dbb5db-kube-api-access-cbjrz\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.633502 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.633557 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.641028 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.648802 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a84a18f-003f-48fb-b522-290957dbb5db-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.658276 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbjrz\" (UniqueName: \"kubernetes.io/projected/2a84a18f-003f-48fb-b522-290957dbb5db-kube-api-access-cbjrz\") pod \"nova-cell0-conductor-0\" (UID: \"2a84a18f-003f-48fb-b522-290957dbb5db\") " pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:03 crc kubenswrapper[4912]: I1208 21:43:03.695476 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:04 crc kubenswrapper[4912]: W1208 21:43:04.179283 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a84a18f_003f_48fb_b522_290957dbb5db.slice/crio-defb8c4469c39fb6254ea2e63f05f9d8d2197e22dee9a2df254980ec5d906153 WatchSource:0}: Error finding container defb8c4469c39fb6254ea2e63f05f9d8d2197e22dee9a2df254980ec5d906153: Status 404 returned error can't find the container with id defb8c4469c39fb6254ea2e63f05f9d8d2197e22dee9a2df254980ec5d906153 Dec 08 21:43:04 crc kubenswrapper[4912]: I1208 21:43:04.184845 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 08 21:43:04 crc kubenswrapper[4912]: I1208 21:43:04.229298 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2a84a18f-003f-48fb-b522-290957dbb5db","Type":"ContainerStarted","Data":"defb8c4469c39fb6254ea2e63f05f9d8d2197e22dee9a2df254980ec5d906153"} Dec 08 21:43:04 crc kubenswrapper[4912]: I1208 21:43:04.234300 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"} Dec 08 21:43:05 crc kubenswrapper[4912]: I1208 21:43:05.243325 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2a84a18f-003f-48fb-b522-290957dbb5db","Type":"ContainerStarted","Data":"8d7ffe5de162a7a83dce9380a7b8ba36b167b42ecee5b4d3f697503fd9e8a5f7"} Dec 08 21:43:05 crc kubenswrapper[4912]: I1208 21:43:05.252588 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:43:05 crc kubenswrapper[4912]: I1208 21:43:05.296074 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.296021176 podStartE2EDuration="2.296021176s" podCreationTimestamp="2025-12-08 21:43:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:05.261747246 +0000 UTC m=+1467.124749319" watchObservedRunningTime="2025-12-08 21:43:05.296021176 +0000 UTC m=+1467.159023269" Dec 08 21:43:06 crc kubenswrapper[4912]: I1208 21:43:06.251643 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:07 crc kubenswrapper[4912]: I1208 21:43:07.557792 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:07 crc kubenswrapper[4912]: I1208 21:43:07.558109 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:07 crc kubenswrapper[4912]: I1208 21:43:07.620496 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:08 crc kubenswrapper[4912]: I1208 21:43:08.319104 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:08 crc kubenswrapper[4912]: I1208 21:43:08.365388 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.290090 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f2d7r" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="registry-server" containerID="cri-o://34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89" gracePeriod=2 Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.754251 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.880211 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content\") pod \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.880313 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities\") pod \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.880407 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cfmg\" (UniqueName: \"kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg\") pod \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\" (UID: \"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc\") " Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.881442 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities" (OuterVolumeSpecName: "utilities") pod "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" (UID: "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.888252 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg" (OuterVolumeSpecName: "kube-api-access-8cfmg") pod "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" (UID: "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc"). InnerVolumeSpecName "kube-api-access-8cfmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.983205 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:10 crc kubenswrapper[4912]: I1208 21:43:10.983247 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cfmg\" (UniqueName: \"kubernetes.io/projected/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-kube-api-access-8cfmg\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.003560 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" (UID: "a4e1d86a-29c3-4f47-be06-0d5dafad3dbc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.085463 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.304051 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerDied","Data":"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89"} Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.304090 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2d7r" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.303960 4912 generic.go:334] "Generic (PLEG): container finished" podID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerID="34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89" exitCode=0 Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.304162 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2d7r" event={"ID":"a4e1d86a-29c3-4f47-be06-0d5dafad3dbc","Type":"ContainerDied","Data":"03607cc91d9cd43beb42328eef6be112a87c1ce2c1d172d44b44d862edca05d5"} Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.304130 4912 scope.go:117] "RemoveContainer" containerID="34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.327259 4912 scope.go:117] "RemoveContainer" containerID="b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.358478 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.363827 4912 scope.go:117] "RemoveContainer" containerID="0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.369882 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f2d7r"] Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.411917 4912 scope.go:117] "RemoveContainer" containerID="34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89" Dec 08 21:43:11 crc kubenswrapper[4912]: E1208 21:43:11.412368 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89\": container with ID starting with 34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89 not found: ID does not exist" containerID="34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.412406 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89"} err="failed to get container status \"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89\": rpc error: code = NotFound desc = could not find container \"34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89\": container with ID starting with 34c93dbdf272e5ca37c2e065ab77f953c433d813144e56076743084a8f3dfe89 not found: ID does not exist" Dec 08 21:43:11 crc 
kubenswrapper[4912]: I1208 21:43:11.412432 4912 scope.go:117] "RemoveContainer" containerID="b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b" Dec 08 21:43:11 crc kubenswrapper[4912]: E1208 21:43:11.412727 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b\": container with ID starting with b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b not found: ID does not exist" containerID="b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.412751 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b"} err="failed to get container status \"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b\": rpc error: code = NotFound desc = could not find container \"b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b\": container with ID starting with b6b02f97e6e6ecd62f5ffde569929f2ac2f5f6a95d0b689ab4f755ac3ffb1e0b not found: ID does not exist" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.412767 4912 scope.go:117] "RemoveContainer" containerID="0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe" Dec 08 21:43:11 crc kubenswrapper[4912]: E1208 21:43:11.413145 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe\": container with ID starting with 0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe not found: ID does not exist" containerID="0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe" Dec 08 21:43:11 crc kubenswrapper[4912]: I1208 21:43:11.413206 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe"} err="failed to get container status \"0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe\": rpc error: code = NotFound desc = could not find container \"0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe\": container with ID starting with 0365b6b63b2352ca08a3e76fe0a469c591f2f9b31ce4ebdd9ee95006f573c7fe not found: ID does not exist" Dec 08 21:43:12 crc kubenswrapper[4912]: I1208 21:43:12.441094 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" path="/var/lib/kubelet/pods/a4e1d86a-29c3-4f47-be06-0d5dafad3dbc/volumes" Dec 08 21:43:13 crc kubenswrapper[4912]: I1208 21:43:13.720966 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.207318 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-tzgbw"] Dec 08 21:43:14 crc kubenswrapper[4912]: E1208 21:43:14.207705 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="extract-content" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.207722 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="extract-content" Dec 08 21:43:14 crc kubenswrapper[4912]: E1208 21:43:14.207762 4912 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="extract-utilities" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.207769 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="extract-utilities" Dec 08 21:43:14 crc kubenswrapper[4912]: E1208 21:43:14.207787 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="registry-server" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.207794 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="registry-server" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.208001 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e1d86a-29c3-4f47-be06-0d5dafad3dbc" containerName="registry-server" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.208609 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.211212 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.212518 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.225339 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tzgbw"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.340433 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.341821 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.347047 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.355826 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.355887 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.356161 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4f7q\" (UniqueName: \"kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.356221 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.358589 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.459535 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.459939 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4f7q\" (UniqueName: \"kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.459977 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.460003 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhcqb\" (UniqueName: \"kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " 
pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.460080 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.460207 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.460240 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.518264 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.519855 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.529566 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4f7q\" (UniqueName: \"kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.555366 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts\") pod \"nova-cell0-cell-mapping-tzgbw\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") " pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.564868 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.564963 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhcqb\" (UniqueName: \"kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.565047 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.571494 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.571596 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.573580 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.577967 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.585388 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.611454 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.621850 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.623499 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.625281 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.632243 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhcqb\" (UniqueName: \"kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb\") pod \"nova-scheduler-0\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.640570 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.672986 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.673633 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwbr7\" (UniqueName: \"kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.673722 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.673867 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.673886 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.678320 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.694016 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.695478 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.697289 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.698804 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.720932 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.755903 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775428 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775495 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775515 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd5qd\" (UniqueName: \"kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775536 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775562 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775580 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775624 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775645 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " 
pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775676 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66c9b\" (UniqueName: \"kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775703 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775723 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwbr7\" (UniqueName: \"kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775759 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775787 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.775836 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.776496 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.780155 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.795425 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.807306 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-xwbr7\" (UniqueName: \"kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7\") pod \"nova-metadata-0\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " pod="openstack/nova-metadata-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.830295 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tzgbw" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878028 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878081 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878315 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66c9b\" (UniqueName: \"kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878374 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878412 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878637 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878743 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878790 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfmjj\" (UniqueName: \"kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878811 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878842 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878866 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878883 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd5qd\" (UniqueName: \"kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.878900 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.879613 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.880364 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.881073 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.881090 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.881114 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc\") pod 
\"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.881938 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.883915 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.894381 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.896754 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66c9b\" (UniqueName: \"kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b\") pod \"nova-api-0\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " pod="openstack/nova-api-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.911004 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd5qd\" (UniqueName: \"kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd\") pod \"dnsmasq-dns-757b4f8459-8jzfb\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") " pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.981014 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.981291 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfmjj\" (UniqueName: \"kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.981454 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.989452 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:14 crc kubenswrapper[4912]: I1208 21:43:14.989565 4912 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.005529 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfmjj\" (UniqueName: \"kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj\") pod \"nova-cell1-novncproxy-0\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.089940 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.108237 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.118755 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.129739 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.224162 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pps7s"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.225990 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.233534 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.233776 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.244816 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pps7s"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.373703 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.391125 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.391498 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.391693 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdjsc\" (UniqueName: \"kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc\") pod \"nova-cell1-conductor-db-sync-pps7s\" 
(UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.391723 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.463539 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tzgbw"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.493957 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.494058 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdjsc\" (UniqueName: \"kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.494088 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.494148 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.504130 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.505959 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.513173 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.517706 4912 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fdjsc\" (UniqueName: \"kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc\") pod \"nova-cell1-conductor-db-sync-pps7s\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.554734 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.734090 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.854137 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.920191 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:15 crc kubenswrapper[4912]: W1208 21:43:15.925521 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1892931_4a93_4f52_a9b9_386022a6e76e.slice/crio-ff28fa578cb05c44af576c98389b65583ba5330d27e11bb2194745220b655219 WatchSource:0}: Error finding container ff28fa578cb05c44af576c98389b65583ba5330d27e11bb2194745220b655219: Status 404 returned error can't find the container with id ff28fa578cb05c44af576c98389b65583ba5330d27e11bb2194745220b655219 Dec 08 21:43:15 crc kubenswrapper[4912]: I1208 21:43:15.986574 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"] Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.053289 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pps7s"] Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.360451 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tzgbw" event={"ID":"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf","Type":"ContainerStarted","Data":"3324ce8add76f2989b9922b5a7486a75ff1be3230bb5ceaffc867c62e42cbd17"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.360515 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tzgbw" event={"ID":"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf","Type":"ContainerStarted","Data":"d32346f918318113796465c5635fa942014f4d1778583c30b71897f6746c69e8"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.363829 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pps7s" event={"ID":"a847f16f-0bee-43a2-ba39-28c7a0900a93","Type":"ContainerStarted","Data":"5b0e139b3bf78d4ff3a38df60b465ab79ec8c1e37f806d1152983d10efb82f91"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.363864 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pps7s" event={"ID":"a847f16f-0bee-43a2-ba39-28c7a0900a93","Type":"ContainerStarted","Data":"b2d9e8cfff8b44486be966d9e3d8a11e0cee60cbb98d9c4b92d993217c8b88dd"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.365779 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a47973b8-33a2-413a-bbaa-15d1ad677c6a","Type":"ContainerStarted","Data":"f16a9effa1417eacd4d049da9e73b54530b9db3d5aa632adb000a9c215e690ce"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.367939 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
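
Two things worth noting in this stretch. First, the `W1208 ... manager.go:1169 Failed to process watch event ... Status 404` warning is a transient race: cAdvisor saw a new cgroup before the runtime could report the container, and the same ID (ff28fa57…) shows up moments later in a successful ContainerStarted event, so it is harmless here. Second, every record carries the klog header `Lmmdd hh:mm:ss.uuuuuu PID file:line]`, where L is the severity (I/W/E/F). A small sketch that pulls those fields out of a line like the ones above (the regexp is my own, written against klog's documented header format):

```go
package main

import (
	"fmt"
	"regexp"
)

// klog header: Lmmdd hh:mm:ss.uuuuuu PID file:line] message
var klogRe = regexp.MustCompile(
	`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w.]+:\d+)\] (.*)`)

func main() {
	line := `I1208 21:43:16.360451 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tzgbw"`
	m := klogRe.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}
```
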
event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerStarted","Data":"ff28fa578cb05c44af576c98389b65583ba5330d27e11bb2194745220b655219"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.373334 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerStarted","Data":"69a12e5f4f43fc4c75dc404355d8772f4997b8b3e035f433398748d63ebc7b39"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.376947 4912 generic.go:334] "Generic (PLEG): container finished" podID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerID="ed4913153b72688ccb802611853dabd081880b2318419893fff261a4c4a34a90" exitCode=0 Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.377566 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" event={"ID":"5765ec98-7f46-44c4-81bf-506b054e8e06","Type":"ContainerDied","Data":"ed4913153b72688ccb802611853dabd081880b2318419893fff261a4c4a34a90"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.377625 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" event={"ID":"5765ec98-7f46-44c4-81bf-506b054e8e06","Type":"ContainerStarted","Data":"d2eb7adf28faad44da763686c7bb72224d2ab39fcc5f1898d3892db132d40e88"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.389427 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b9f4962b-16a3-4bf6-a747-7a6cb091023e","Type":"ContainerStarted","Data":"c7a2bd8c7fa8d79dd07d1a3c5a2300152bae0a0b27a3d33dcd1824fb0831111b"} Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.427703 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-tzgbw" podStartSLOduration=2.4276773 podStartE2EDuration="2.4276773s" podCreationTimestamp="2025-12-08 21:43:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:16.388459383 +0000 UTC m=+1478.251461486" watchObservedRunningTime="2025-12-08 21:43:16.4276773 +0000 UTC m=+1478.290679383" Dec 08 21:43:16 crc kubenswrapper[4912]: I1208 21:43:16.440522 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-pps7s" podStartSLOduration=1.440495359 podStartE2EDuration="1.440495359s" podCreationTimestamp="2025-12-08 21:43:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:16.413091216 +0000 UTC m=+1478.276093309" watchObservedRunningTime="2025-12-08 21:43:16.440495359 +0000 UTC m=+1478.303497442" Dec 08 21:43:17 crc kubenswrapper[4912]: I1208 21:43:17.413807 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" event={"ID":"5765ec98-7f46-44c4-81bf-506b054e8e06","Type":"ContainerStarted","Data":"7f6fbba03343d4a80ff37a437561d4c7c555fbc53ff7f8d9ded26e868803eab9"} Dec 08 21:43:17 crc kubenswrapper[4912]: I1208 21:43:17.414172 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" Dec 08 21:43:17 crc kubenswrapper[4912]: I1208 21:43:17.439189 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" podStartSLOduration=3.439168735 podStartE2EDuration="3.439168735s" podCreationTimestamp="2025-12-08 
21:43:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:17.433582122 +0000 UTC m=+1479.296584225" watchObservedRunningTime="2025-12-08 21:43:17.439168735 +0000 UTC m=+1479.302170818" Dec 08 21:43:18 crc kubenswrapper[4912]: I1208 21:43:18.112013 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:18 crc kubenswrapper[4912]: I1208 21:43:18.127921 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.436413 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a47973b8-33a2-413a-bbaa-15d1ad677c6a","Type":"ContainerStarted","Data":"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6"} Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.436490 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6" gracePeriod=30 Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.440798 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerStarted","Data":"4fb17d2e5e4425e5eb935c4e9a6a9706bd9746dddf85b4b323c8cd440c7a6700"} Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.480833 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerStarted","Data":"5a62f5df35a1b5b6eaa34d535855002be112ec2b8b718bf7aeee84191d5750ef"} Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.485741 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.568052394 podStartE2EDuration="5.485712249s" podCreationTimestamp="2025-12-08 21:43:14 +0000 UTC" firstStartedPulling="2025-12-08 21:43:15.924281343 +0000 UTC m=+1477.787283426" lastFinishedPulling="2025-12-08 21:43:18.841941198 +0000 UTC m=+1480.704943281" observedRunningTime="2025-12-08 21:43:19.464831463 +0000 UTC m=+1481.327833546" watchObservedRunningTime="2025-12-08 21:43:19.485712249 +0000 UTC m=+1481.348714332" Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.504264 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b9f4962b-16a3-4bf6-a747-7a6cb091023e","Type":"ContainerStarted","Data":"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f"} Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.528877 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.116313035 podStartE2EDuration="5.528858897s" podCreationTimestamp="2025-12-08 21:43:14 +0000 UTC" firstStartedPulling="2025-12-08 21:43:15.407621467 +0000 UTC m=+1477.270623550" lastFinishedPulling="2025-12-08 21:43:18.820167329 +0000 UTC m=+1480.683169412" observedRunningTime="2025-12-08 21:43:19.517765202 +0000 UTC m=+1481.380767285" watchObservedRunningTime="2025-12-08 21:43:19.528858897 +0000 UTC m=+1481.391860980" Dec 08 21:43:19 crc kubenswrapper[4912]: I1208 21:43:19.698097 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.130711 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.516975 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerStarted","Data":"4e31d7b6203419c626ea5ad086e6634e5919ce4e406b64f2688f2b567237afd2"} Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.517104 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-log" containerID="cri-o://4fb17d2e5e4425e5eb935c4e9a6a9706bd9746dddf85b4b323c8cd440c7a6700" gracePeriod=30 Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.517189 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-metadata" containerID="cri-o://4e31d7b6203419c626ea5ad086e6634e5919ce4e406b64f2688f2b567237afd2" gracePeriod=30 Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.524208 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerStarted","Data":"8e8efc064eafb595f34f248ad02afefbcbcc421b82285a99ecc83a1e5a96b1b3"} Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.543878 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.606674026 podStartE2EDuration="6.543853122s" podCreationTimestamp="2025-12-08 21:43:14 +0000 UTC" firstStartedPulling="2025-12-08 21:43:15.943328162 +0000 UTC m=+1477.806330245" lastFinishedPulling="2025-12-08 21:43:18.880507258 +0000 UTC m=+1480.743509341" observedRunningTime="2025-12-08 21:43:20.539418838 +0000 UTC m=+1482.402420931" watchObservedRunningTime="2025-12-08 21:43:20.543853122 +0000 UTC m=+1482.406855205" Dec 08 21:43:20 crc kubenswrapper[4912]: I1208 21:43:20.566777 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.489837876 podStartE2EDuration="6.56675583s" podCreationTimestamp="2025-12-08 21:43:14 +0000 UTC" firstStartedPulling="2025-12-08 21:43:15.766883842 +0000 UTC m=+1477.629885925" lastFinishedPulling="2025-12-08 21:43:18.843801796 +0000 UTC m=+1480.706803879" observedRunningTime="2025-12-08 21:43:20.558794315 +0000 UTC m=+1482.421796398" watchObservedRunningTime="2025-12-08 21:43:20.56675583 +0000 UTC m=+1482.429757923" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.541643 4912 generic.go:334] "Generic (PLEG): container finished" podID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerID="4e31d7b6203419c626ea5ad086e6634e5919ce4e406b64f2688f2b567237afd2" exitCode=0 Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.543246 4912 generic.go:334] "Generic (PLEG): container finished" podID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerID="4fb17d2e5e4425e5eb935c4e9a6a9706bd9746dddf85b4b323c8cd440c7a6700" exitCode=143 Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.541744 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerDied","Data":"4e31d7b6203419c626ea5ad086e6634e5919ce4e406b64f2688f2b567237afd2"} Dec 08 
21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.543436 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerDied","Data":"4fb17d2e5e4425e5eb935c4e9a6a9706bd9746dddf85b4b323c8cd440c7a6700"} Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.742049 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.843876 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle\") pod \"e1892931-4a93-4f52-a9b9-386022a6e76e\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.844018 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwbr7\" (UniqueName: \"kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7\") pod \"e1892931-4a93-4f52-a9b9-386022a6e76e\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.844090 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs\") pod \"e1892931-4a93-4f52-a9b9-386022a6e76e\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.844143 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data\") pod \"e1892931-4a93-4f52-a9b9-386022a6e76e\" (UID: \"e1892931-4a93-4f52-a9b9-386022a6e76e\") " Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.844443 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs" (OuterVolumeSpecName: "logs") pod "e1892931-4a93-4f52-a9b9-386022a6e76e" (UID: "e1892931-4a93-4f52-a9b9-386022a6e76e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.860473 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7" (OuterVolumeSpecName: "kube-api-access-xwbr7") pod "e1892931-4a93-4f52-a9b9-386022a6e76e" (UID: "e1892931-4a93-4f52-a9b9-386022a6e76e"). InnerVolumeSpecName "kube-api-access-xwbr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.875641 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1892931-4a93-4f52-a9b9-386022a6e76e" (UID: "e1892931-4a93-4f52-a9b9-386022a6e76e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.878348 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data" (OuterVolumeSpecName: "config-data") pod "e1892931-4a93-4f52-a9b9-386022a6e76e" (UID: "e1892931-4a93-4f52-a9b9-386022a6e76e"). 
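
The two exit codes above follow the usual Unix convention for the `Killing container with a grace period` sequence: `nova-metadata-metadata` shut down cleanly within the 30s grace period (exitCode=0), while `nova-metadata-log` died on the signal itself (exitCode=143 = 128 + 15, i.e. SIGTERM). A minimal sketch of a process that would exit 0 here, assuming nothing about the actual nova images:

```go
package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	// Catch the SIGTERM the kubelet sends at the start of the grace period.
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM)
	defer stop()

	<-ctx.Done() // block until SIGTERM arrives
	fmt.Println("flushing and exiting cleanly")
	os.Exit(0) // clean exit -> exitCode=0 instead of 143
}
```
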
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.948126 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.948172 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1892931-4a93-4f52-a9b9-386022a6e76e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.948188 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwbr7\" (UniqueName: \"kubernetes.io/projected/e1892931-4a93-4f52-a9b9-386022a6e76e-kube-api-access-xwbr7\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:21 crc kubenswrapper[4912]: I1208 21:43:21.948197 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1892931-4a93-4f52-a9b9-386022a6e76e-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.576881 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.577422 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1892931-4a93-4f52-a9b9-386022a6e76e","Type":"ContainerDied","Data":"ff28fa578cb05c44af576c98389b65583ba5330d27e11bb2194745220b655219"} Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.577686 4912 scope.go:117] "RemoveContainer" containerID="4e31d7b6203419c626ea5ad086e6634e5919ce4e406b64f2688f2b567237afd2" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.602719 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.610310 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.627487 4912 scope.go:117] "RemoveContainer" containerID="4fb17d2e5e4425e5eb935c4e9a6a9706bd9746dddf85b4b323c8cd440c7a6700" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.629518 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:22 crc kubenswrapper[4912]: E1208 21:43:22.629912 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-metadata" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.629931 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-metadata" Dec 08 21:43:22 crc kubenswrapper[4912]: E1208 21:43:22.629961 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-log" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.629968 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-log" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.630149 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-metadata" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.630172 4912 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" containerName="nova-metadata-log" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.631367 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.633416 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.633602 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.706895 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.773051 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.773107 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvkwv\" (UniqueName: \"kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.773180 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.773507 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.773906 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.876142 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.876212 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvkwv\" (UniqueName: \"kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.876289 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.876335 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.876419 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.878059 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.883819 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.885073 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.886139 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.922262 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvkwv\" (UniqueName: \"kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv\") pod \"nova-metadata-0\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") " pod="openstack/nova-metadata-0" Dec 08 21:43:22 crc kubenswrapper[4912]: I1208 21:43:22.971164 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:43:23 crc kubenswrapper[4912]: I1208 21:43:23.439355 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:43:23 crc kubenswrapper[4912]: W1208 21:43:23.446751 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeba66c96_4402_4058_a7ad_f215b7eda8c8.slice/crio-ae43359e75b3586b4956801e5794d4dcaafd5c63f7e66c910cebfa7f928eb7c6 WatchSource:0}: Error finding container ae43359e75b3586b4956801e5794d4dcaafd5c63f7e66c910cebfa7f928eb7c6: Status 404 returned error can't find the container with id ae43359e75b3586b4956801e5794d4dcaafd5c63f7e66c910cebfa7f928eb7c6 Dec 08 21:43:23 crc kubenswrapper[4912]: I1208 21:43:23.586768 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerStarted","Data":"ae43359e75b3586b4956801e5794d4dcaafd5c63f7e66c910cebfa7f928eb7c6"} Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.441307 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1892931-4a93-4f52-a9b9-386022a6e76e" path="/var/lib/kubelet/pods/e1892931-4a93-4f52-a9b9-386022a6e76e/volumes" Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.599189 4912 generic.go:334] "Generic (PLEG): container finished" podID="6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" containerID="3324ce8add76f2989b9922b5a7486a75ff1be3230bb5ceaffc867c62e42cbd17" exitCode=0 Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.599256 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tzgbw" event={"ID":"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf","Type":"ContainerDied","Data":"3324ce8add76f2989b9922b5a7486a75ff1be3230bb5ceaffc867c62e42cbd17"} Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.601291 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerStarted","Data":"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"} Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.601326 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerStarted","Data":"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"} Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.641687 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.641665611 podStartE2EDuration="2.641665611s" podCreationTimestamp="2025-12-08 21:43:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:24.631270404 +0000 UTC m=+1486.494272497" watchObservedRunningTime="2025-12-08 21:43:24.641665611 +0000 UTC m=+1486.504667694" Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.698131 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:43:24 crc kubenswrapper[4912]: I1208 21:43:24.724374 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.110272 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:43:25 
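Every kubelet entry above carries a klog header after the journal prefix: a severity letter fused with the month and day ("I1208" = Info, December 8), the wall-clock time with microseconds, the emitting PID (4912), and the source file and line ("kubelet.go:2453]"). A minimal Go sketch for splitting journal lines like these into those fields, e.g. when piping in "journalctl -u kubelet"; the regular expression and output layout are this sketch's own assumptions, not anything the kubelet ships:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// klogHeader matches the header embedded in each kubelet journal line,
// e.g. `I1208 21:43:21.543436 4912 kubelet.go:2453] ...`:
// severity letter, month+day, time, PID, source file:line, then the message.
var klogHeader = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([\w./-]+:\d+)\] (.*)`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some entries are very long
	for sc.Scan() {
		m := klogHeader.FindStringSubmatch(sc.Text())
		if m == nil {
			continue // systemd/restorecon lines without a klog header
		}
		sev, ts, file, msg := m[1], m[3], m[5], m[6]
		fmt.Printf("%s %s %-30s %.100s\n", sev, ts, file, msg)
	}
}

Filtering on the file:line pair (reconciler_common.go:159, operation_generator.go:803, and so on) is often the quickest way to isolate one subsystem, such as the volume reconciler that dominates this stretch of the log.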
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.110330 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.120199 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb"
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.199881 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"]
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.200148 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="dnsmasq-dns" containerID="cri-o://eca92ec2ccc639c01fc7ed4fa1c9c5fa08950a2448be4abce69e2c288754d6c2" gracePeriod=10
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.616999 4912 generic.go:334] "Generic (PLEG): container finished" podID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerID="eca92ec2ccc639c01fc7ed4fa1c9c5fa08950a2448be4abce69e2c288754d6c2" exitCode=0
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.617075 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" event={"ID":"c4df2822-3cb8-47b8-b06e-15601ceb80ca","Type":"ContainerDied","Data":"eca92ec2ccc639c01fc7ed4fa1c9c5fa08950a2448be4abce69e2c288754d6c2"}
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.652444 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.730538 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb"
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.835984 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.836208 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.836255 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.836292 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.836359 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.836407 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjm2k\" (UniqueName: \"kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k\") pod \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\" (UID: \"c4df2822-3cb8-47b8-b06e-15601ceb80ca\") "
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.863220 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k" (OuterVolumeSpecName: "kube-api-access-kjm2k") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "kube-api-access-kjm2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.923160 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config" (OuterVolumeSpecName: "config") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.924814 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.932049 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.938845 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.939085 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.939111 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.939126 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.939138 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjm2k\" (UniqueName: \"kubernetes.io/projected/c4df2822-3cb8-47b8-b06e-15601ceb80ca-kube-api-access-kjm2k\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.940983 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c4df2822-3cb8-47b8-b06e-15601ceb80ca" (UID: "c4df2822-3cb8-47b8-b06e-15601ceb80ca"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:43:25 crc kubenswrapper[4912]: I1208 21:43:25.994066 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tzgbw"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.059254 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data\") pod \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") "
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.059624 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4f7q\" (UniqueName: \"kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q\") pod \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") "
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.059649 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts\") pod \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") "
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.059731 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle\") pod \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\" (UID: \"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf\") "
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.060235 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.060251 4912 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4df2822-3cb8-47b8-b06e-15601ceb80ca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.062481 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts" (OuterVolumeSpecName: "scripts") pod "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" (UID: "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.063181 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q" (OuterVolumeSpecName: "kube-api-access-p4f7q") pod "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" (UID: "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf"). InnerVolumeSpecName "kube-api-access-p4f7q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.089534 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" (UID: "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.091525 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data" (OuterVolumeSpecName: "config-data") pod "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" (UID: "6ed2e5a0-8170-47d1-99be-0d6f5cd047bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.162006 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.162055 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4f7q\" (UniqueName: \"kubernetes.io/projected/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-kube-api-access-p4f7q\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.162065 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.162073 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.193231 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.193574 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.628574 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tzgbw" event={"ID":"6ed2e5a0-8170-47d1-99be-0d6f5cd047bf","Type":"ContainerDied","Data":"d32346f918318113796465c5635fa942014f4d1778583c30b71897f6746c69e8"}
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.628632 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d32346f918318113796465c5635fa942014f4d1778583c30b71897f6746c69e8"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.628693 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tzgbw"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.634482 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb" event={"ID":"c4df2822-3cb8-47b8-b06e-15601ceb80ca","Type":"ContainerDied","Data":"93aa3389a0381d8036b9b8c7fdfb428e60ffab9b70942645aeb5f7fed1405a0c"}
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.634583 4912 scope.go:117] "RemoveContainer" containerID="eca92ec2ccc639c01fc7ed4fa1c9c5fa08950a2448be4abce69e2c288754d6c2"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.634577 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8mgnb"
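The two "Probe failed" entries above record the raw output of an HTTP startup probe: the wording "context deadline exceeded (Client.Timeout exceeded while awaiting headers)" is what Go's net/http client returns when its Timeout elapses before the response headers arrive. A small Go sketch that performs the same kind of check against the endpoint from the log; the one-second timeout is an assumed value, and the success rule (2xx/3xx) mirrors how kubelet's HTTP prober classifies status codes:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one GET with a hard client-side timeout, the way an HTTP
// startup/readiness probe does.
func probe(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// An endpoint that is slow to respond surfaces exactly as in the log:
		// "context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return nil // 2xx/3xx counts as healthy
	}
	return fmt.Errorf("unhealthy HTTP status %d", resp.StatusCode)
}

func main() {
	if err := probe("http://10.217.0.182:8774/", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}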
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.690412 4912 scope.go:117] "RemoveContainer" containerID="02839965b7af9e977e09f3e4132e9c7213df281b994d8235f91465b19e82bf43"
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.692827 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"]
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.713258 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8mgnb"]
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.806606 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.806973 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-log" containerID="cri-o://5a62f5df35a1b5b6eaa34d535855002be112ec2b8b718bf7aeee84191d5750ef" gracePeriod=30
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.807078 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-api" containerID="cri-o://8e8efc064eafb595f34f248ad02afefbcbcc421b82285a99ecc83a1e5a96b1b3" gracePeriod=30
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.820476 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.832962 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.833308 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-log" containerID="cri-o://ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed" gracePeriod=30
Dec 08 21:43:26 crc kubenswrapper[4912]: I1208 21:43:26.833454 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-metadata" containerID="cri-o://22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8" gracePeriod=30
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.305852 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392099 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs\") pod \"eba66c96-4402-4058-a7ad-f215b7eda8c8\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") "
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392179 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data\") pod \"eba66c96-4402-4058-a7ad-f215b7eda8c8\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") "
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392295 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvkwv\" (UniqueName: \"kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv\") pod \"eba66c96-4402-4058-a7ad-f215b7eda8c8\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") "
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392339 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs\") pod \"eba66c96-4402-4058-a7ad-f215b7eda8c8\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") "
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392375 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle\") pod \"eba66c96-4402-4058-a7ad-f215b7eda8c8\" (UID: \"eba66c96-4402-4058-a7ad-f215b7eda8c8\") "
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392714 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs" (OuterVolumeSpecName: "logs") pod "eba66c96-4402-4058-a7ad-f215b7eda8c8" (UID: "eba66c96-4402-4058-a7ad-f215b7eda8c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.392843 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eba66c96-4402-4058-a7ad-f215b7eda8c8-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.397818 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv" (OuterVolumeSpecName: "kube-api-access-rvkwv") pod "eba66c96-4402-4058-a7ad-f215b7eda8c8" (UID: "eba66c96-4402-4058-a7ad-f215b7eda8c8"). InnerVolumeSpecName "kube-api-access-rvkwv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.432262 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eba66c96-4402-4058-a7ad-f215b7eda8c8" (UID: "eba66c96-4402-4058-a7ad-f215b7eda8c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.435321 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data" (OuterVolumeSpecName: "config-data") pod "eba66c96-4402-4058-a7ad-f215b7eda8c8" (UID: "eba66c96-4402-4058-a7ad-f215b7eda8c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.484169 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "eba66c96-4402-4058-a7ad-f215b7eda8c8" (UID: "eba66c96-4402-4058-a7ad-f215b7eda8c8"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.494685 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.495146 4912 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.495407 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba66c96-4402-4058-a7ad-f215b7eda8c8-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.495494 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvkwv\" (UniqueName: \"kubernetes.io/projected/eba66c96-4402-4058-a7ad-f215b7eda8c8-kube-api-access-rvkwv\") on node \"crc\" DevicePath \"\""
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.653122 4912 generic.go:334] "Generic (PLEG): container finished" podID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerID="22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8" exitCode=0
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.653160 4912 generic.go:334] "Generic (PLEG): container finished" podID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerID="ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed" exitCode=143
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.653286 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.655094 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerDied","Data":"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"}
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.655325 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerDied","Data":"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"}
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.655425 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eba66c96-4402-4058-a7ad-f215b7eda8c8","Type":"ContainerDied","Data":"ae43359e75b3586b4956801e5794d4dcaafd5c63f7e66c910cebfa7f928eb7c6"}
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.655377 4912 scope.go:117] "RemoveContainer" containerID="22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.656303 4912 generic.go:334] "Generic (PLEG): container finished" podID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerID="5a62f5df35a1b5b6eaa34d535855002be112ec2b8b718bf7aeee84191d5750ef" exitCode=143
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.656503 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerDied","Data":"5a62f5df35a1b5b6eaa34d535855002be112ec2b8b718bf7aeee84191d5750ef"}
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.664566 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler" containerID="cri-o://5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" gracePeriod=30
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.699396 4912 scope.go:117] "RemoveContainer" containerID="ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.708604 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.716633 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.758263 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.759509 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-log"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759535 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-log"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.759565 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="dnsmasq-dns"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759575 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="dnsmasq-dns"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.759590 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" containerName="nova-manage"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759597 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" containerName="nova-manage"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.759618 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="init"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759625 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="init"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.759639 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-metadata"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759646 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-metadata"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759865 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" containerName="dnsmasq-dns"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759888 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-metadata"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759905 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" containerName="nova-metadata-log"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.759920 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" containerName="nova-manage"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.761281 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.762081 4912 scope.go:117] "RemoveContainer" containerID="22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.763346 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8\": container with ID starting with 22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8 not found: ID does not exist" containerID="22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.763394 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"} err="failed to get container status \"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8\": rpc error: code = NotFound desc = could not find container \"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8\": container with ID starting with 22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8 not found: ID does not exist"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.763428 4912 scope.go:117] "RemoveContainer" containerID="ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"
Dec 08 21:43:27 crc kubenswrapper[4912]: E1208 21:43:27.763925 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed\": container with ID starting with ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed not found: ID does not exist" containerID="ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.763975 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"} err="failed to get container status \"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed\": rpc error: code = NotFound desc = could not find container \"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed\": container with ID starting with ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed not found: ID does not exist"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.764015 4912 scope.go:117] "RemoveContainer" containerID="22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.764730 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8"} err="failed to get container status \"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8\": rpc error: code = NotFound desc = could not find container \"22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8\": container with ID starting with 22b1ad962f33ad194bd3ada180a42f59396763f6ce4f346051771bd08f2a2ca8 not found: ID does not exist"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.764753 4912 scope.go:117] "RemoveContainer" containerID="ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.764972 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed"} err="failed to get container status \"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed\": rpc error: code = NotFound desc = could not find container \"ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed\": container with ID starting with ebf2c4ec183095939f6fb321c29e85becdc5d171b8803db4abc68ca14a1a71ed not found: ID does not exist"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.766943 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.767355 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.771407 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.907512 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tjc7\" (UniqueName: \"kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.907713 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.908026 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.908230 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:27 crc kubenswrapper[4912]: I1208 21:43:27.908454 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010116 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010246 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010267 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010326 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010371 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tjc7\" (UniqueName: \"kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.010974 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.014373 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.015714 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.016387 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.029118 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tjc7\" (UniqueName: \"kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7\") pod \"nova-metadata-0\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.079260 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.438997 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4df2822-3cb8-47b8-b06e-15601ceb80ca" path="/var/lib/kubelet/pods/c4df2822-3cb8-47b8-b06e-15601ceb80ca/volumes"
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.440229 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eba66c96-4402-4058-a7ad-f215b7eda8c8" path="/var/lib/kubelet/pods/eba66c96-4402-4058-a7ad-f215b7eda8c8/volumes"
Dec 08 21:43:28 crc kubenswrapper[4912]: W1208 21:43:28.556099 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode972bd39_06fa_4561_ac32_072551d4da6e.slice/crio-52233b38dca8eb83f6b0fc2b678a496f2a581890bb61bec260731c13dc48ef37 WatchSource:0}: Error finding container 52233b38dca8eb83f6b0fc2b678a496f2a581890bb61bec260731c13dc48ef37: Status 404 returned error can't find the container with id 52233b38dca8eb83f6b0fc2b678a496f2a581890bb61bec260731c13dc48ef37
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.562126 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.687769 4912 generic.go:334] "Generic (PLEG): container finished" podID="a847f16f-0bee-43a2-ba39-28c7a0900a93" containerID="5b0e139b3bf78d4ff3a38df60b465ab79ec8c1e37f806d1152983d10efb82f91" exitCode=0
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.687858 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pps7s" event={"ID":"a847f16f-0bee-43a2-ba39-28c7a0900a93","Type":"ContainerDied","Data":"5b0e139b3bf78d4ff3a38df60b465ab79ec8c1e37f806d1152983d10efb82f91"}
Dec 08 21:43:28 crc kubenswrapper[4912]: I1208 21:43:28.693345 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerStarted","Data":"52233b38dca8eb83f6b0fc2b678a496f2a581890bb61bec260731c13dc48ef37"}
Dec 08 21:43:29 crc kubenswrapper[4912]: E1208 21:43:29.700111 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:43:29 crc kubenswrapper[4912]: E1208 21:43:29.701963 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:43:29 crc kubenswrapper[4912]: E1208 21:43:29.703568 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:43:29 crc kubenswrapper[4912]: E1208 21:43:29.703645 4912 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler"
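The readiness probe that errors above is an exec probe: the kubelet asks the runtime to run /usr/bin/pgrep -r DRST nova-scheduler inside the container (exit 0 means a matching "nova-scheduler" process exists in run state D, R, S or T), and CRI-O refuses because the container is already stopping ("cannot register an exec PID"). The same check can be reproduced by hand; the sketch below runs it on the local host with os/exec rather than through the CRI ExecSync call, which is an assumption made for illustration:

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Same command the probe issues: match "nova-scheduler" processes whose
	// run state is one of D, R, S or T (-r/--runstates, procps-ng pgrep).
	cmd := exec.Command("/usr/bin/pgrep", "-r", "DRST", "nova-scheduler")
	out, err := cmd.Output()
	if err != nil {
		// pgrep exits 1 when nothing matches; a nonzero exit from the probe
		// command is reported as a probe failure.
		fmt.Println("not ready:", err)
		return
	}
	fmt.Printf("ready, matching PIDs:\n%s", out)
}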
probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler" Dec 08 21:43:29 crc kubenswrapper[4912]: I1208 21:43:29.708823 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerStarted","Data":"83f84b72a7994af913da80aca452e88edd4dba2a5a140c106ea83048229260d4"} Dec 08 21:43:29 crc kubenswrapper[4912]: I1208 21:43:29.708873 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerStarted","Data":"f5dd6596c110ad15fd3aa72c137b9705b30a0b7b776b53eb4b80da86a8878294"} Dec 08 21:43:29 crc kubenswrapper[4912]: I1208 21:43:29.747590 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.747570025 podStartE2EDuration="2.747570025s" podCreationTimestamp="2025-12-08 21:43:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:29.736264984 +0000 UTC m=+1491.599267067" watchObservedRunningTime="2025-12-08 21:43:29.747570025 +0000 UTC m=+1491.610572098" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.095249 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.255885 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts\") pod \"a847f16f-0bee-43a2-ba39-28c7a0900a93\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.256102 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdjsc\" (UniqueName: \"kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc\") pod \"a847f16f-0bee-43a2-ba39-28c7a0900a93\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.256130 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data\") pod \"a847f16f-0bee-43a2-ba39-28c7a0900a93\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.256174 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle\") pod \"a847f16f-0bee-43a2-ba39-28c7a0900a93\" (UID: \"a847f16f-0bee-43a2-ba39-28c7a0900a93\") " Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.275465 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts" (OuterVolumeSpecName: "scripts") pod "a847f16f-0bee-43a2-ba39-28c7a0900a93" (UID: "a847f16f-0bee-43a2-ba39-28c7a0900a93"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.277313 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc" (OuterVolumeSpecName: "kube-api-access-fdjsc") pod "a847f16f-0bee-43a2-ba39-28c7a0900a93" (UID: "a847f16f-0bee-43a2-ba39-28c7a0900a93"). InnerVolumeSpecName "kube-api-access-fdjsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.287086 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a847f16f-0bee-43a2-ba39-28c7a0900a93" (UID: "a847f16f-0bee-43a2-ba39-28c7a0900a93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.288854 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data" (OuterVolumeSpecName: "config-data") pod "a847f16f-0bee-43a2-ba39-28c7a0900a93" (UID: "a847f16f-0bee-43a2-ba39-28c7a0900a93"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.358667 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-scripts\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.358711 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdjsc\" (UniqueName: \"kubernetes.io/projected/a847f16f-0bee-43a2-ba39-28c7a0900a93-kube-api-access-fdjsc\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.358727 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.358739 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a847f16f-0bee-43a2-ba39-28c7a0900a93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.719412 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pps7s" event={"ID":"a847f16f-0bee-43a2-ba39-28c7a0900a93","Type":"ContainerDied","Data":"b2d9e8cfff8b44486be966d9e3d8a11e0cee60cbb98d9c4b92d993217c8b88dd"} Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.719516 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2d9e8cfff8b44486be966d9e3d8a11e0cee60cbb98d9c4b92d993217c8b88dd" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.719436 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pps7s" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.791397 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:43:30 crc kubenswrapper[4912]: E1208 21:43:30.791932 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a847f16f-0bee-43a2-ba39-28c7a0900a93" containerName="nova-cell1-conductor-db-sync" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.791956 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a847f16f-0bee-43a2-ba39-28c7a0900a93" containerName="nova-cell1-conductor-db-sync" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.792258 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a847f16f-0bee-43a2-ba39-28c7a0900a93" containerName="nova-cell1-conductor-db-sync" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.793139 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.797878 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.822397 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.869533 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw7vg\" (UniqueName: \"kubernetes.io/projected/4a8f2fe2-7084-419b-90cc-880b395eea7d-kube-api-access-qw7vg\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.870314 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.870517 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.971889 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.971970 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.972045 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw7vg\" (UniqueName: 
\"kubernetes.io/projected/4a8f2fe2-7084-419b-90cc-880b395eea7d-kube-api-access-qw7vg\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.985204 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.986610 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a8f2fe2-7084-419b-90cc-880b395eea7d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:30 crc kubenswrapper[4912]: I1208 21:43:30.990257 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw7vg\" (UniqueName: \"kubernetes.io/projected/4a8f2fe2-7084-419b-90cc-880b395eea7d-kube-api-access-qw7vg\") pod \"nova-cell1-conductor-0\" (UID: \"4a8f2fe2-7084-419b-90cc-880b395eea7d\") " pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.165884 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.174052 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.277175 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhcqb\" (UniqueName: \"kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb\") pod \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.277546 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle\") pod \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.277708 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data\") pod \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\" (UID: \"b9f4962b-16a3-4bf6-a747-7a6cb091023e\") " Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.281600 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb" (OuterVolumeSpecName: "kube-api-access-qhcqb") pod "b9f4962b-16a3-4bf6-a747-7a6cb091023e" (UID: "b9f4962b-16a3-4bf6-a747-7a6cb091023e"). InnerVolumeSpecName "kube-api-access-qhcqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.310975 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data" (OuterVolumeSpecName: "config-data") pod "b9f4962b-16a3-4bf6-a747-7a6cb091023e" (UID: "b9f4962b-16a3-4bf6-a747-7a6cb091023e"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.312613 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9f4962b-16a3-4bf6-a747-7a6cb091023e" (UID: "b9f4962b-16a3-4bf6-a747-7a6cb091023e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.380576 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhcqb\" (UniqueName: \"kubernetes.io/projected/b9f4962b-16a3-4bf6-a747-7a6cb091023e-kube-api-access-qhcqb\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.380603 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.380611 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4962b-16a3-4bf6-a747-7a6cb091023e-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.616826 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 08 21:43:31 crc kubenswrapper[4912]: W1208 21:43:31.621501 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a8f2fe2_7084_419b_90cc_880b395eea7d.slice/crio-8e9231ee7ee7593f4ac38bb8629964a7402083d0760030356018975289bd9319 WatchSource:0}: Error finding container 8e9231ee7ee7593f4ac38bb8629964a7402083d0760030356018975289bd9319: Status 404 returned error can't find the container with id 8e9231ee7ee7593f4ac38bb8629964a7402083d0760030356018975289bd9319 Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.730380 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4a8f2fe2-7084-419b-90cc-880b395eea7d","Type":"ContainerStarted","Data":"8e9231ee7ee7593f4ac38bb8629964a7402083d0760030356018975289bd9319"} Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.733269 4912 generic.go:334] "Generic (PLEG): container finished" podID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" exitCode=0 Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.733299 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b9f4962b-16a3-4bf6-a747-7a6cb091023e","Type":"ContainerDied","Data":"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f"} Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.733317 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b9f4962b-16a3-4bf6-a747-7a6cb091023e","Type":"ContainerDied","Data":"c7a2bd8c7fa8d79dd07d1a3c5a2300152bae0a0b27a3d33dcd1824fb0831111b"} Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.733333 4912 scope.go:117] "RemoveContainer" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.733440 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.771267 4912 scope.go:117] "RemoveContainer" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" Dec 08 21:43:31 crc kubenswrapper[4912]: E1208 21:43:31.772597 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f\": container with ID starting with 5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f not found: ID does not exist" containerID="5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.772642 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f"} err="failed to get container status \"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f\": rpc error: code = NotFound desc = could not find container \"5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f\": container with ID starting with 5fc206977f524e1888deb3e86ccca00c695d9b178b687417416ae0d730cf1e0f not found: ID does not exist" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.799519 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.809085 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.855562 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4912]: E1208 21:43:31.856338 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.856440 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.857028 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" containerName="nova-scheduler-scheduler" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.857953 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.862552 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.886385 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.994602 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.995106 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:31 crc kubenswrapper[4912]: I1208 21:43:31.995295 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct2tt\" (UniqueName: \"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.096910 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.097078 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.097113 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct2tt\" (UniqueName: \"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.102417 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.104915 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.114592 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct2tt\" (UniqueName: 
\"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt\") pod \"nova-scheduler-0\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.191084 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.465749 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9f4962b-16a3-4bf6-a747-7a6cb091023e" path="/var/lib/kubelet/pods/b9f4962b-16a3-4bf6-a747-7a6cb091023e/volumes" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.673370 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.744662 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"263bb3b6-a184-4b85-9644-5c1f58345c7c","Type":"ContainerStarted","Data":"294a5f8454bef7948581153b56dda146d7efe24f5d5aff5577b8519d4429289e"} Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.746178 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4a8f2fe2-7084-419b-90cc-880b395eea7d","Type":"ContainerStarted","Data":"c16595caa421d25a331561fe998794ac7a0d45b941403ed595ccc9b7ff6edadf"} Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.747781 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.750456 4912 generic.go:334] "Generic (PLEG): container finished" podID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerID="8e8efc064eafb595f34f248ad02afefbcbcc421b82285a99ecc83a1e5a96b1b3" exitCode=0 Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.750504 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerDied","Data":"8e8efc064eafb595f34f248ad02afefbcbcc421b82285a99ecc83a1e5a96b1b3"} Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.750531 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32c6181b-65e4-403f-aa0c-43ff60a840ea","Type":"ContainerDied","Data":"69a12e5f4f43fc4c75dc404355d8772f4997b8b3e035f433398748d63ebc7b39"} Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.750545 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69a12e5f4f43fc4c75dc404355d8772f4997b8b3e035f433398748d63ebc7b39" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.766787 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.76672413 podStartE2EDuration="2.76672413s" podCreationTimestamp="2025-12-08 21:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:32.763523917 +0000 UTC m=+1494.626526010" watchObservedRunningTime="2025-12-08 21:43:32.76672413 +0000 UTC m=+1494.629726213" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.850590 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.915862 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66c9b\" (UniqueName: \"kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b\") pod \"32c6181b-65e4-403f-aa0c-43ff60a840ea\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.916057 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs\") pod \"32c6181b-65e4-403f-aa0c-43ff60a840ea\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.916184 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle\") pod \"32c6181b-65e4-403f-aa0c-43ff60a840ea\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.916297 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data\") pod \"32c6181b-65e4-403f-aa0c-43ff60a840ea\" (UID: \"32c6181b-65e4-403f-aa0c-43ff60a840ea\") " Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.916710 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs" (OuterVolumeSpecName: "logs") pod "32c6181b-65e4-403f-aa0c-43ff60a840ea" (UID: "32c6181b-65e4-403f-aa0c-43ff60a840ea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.917536 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c6181b-65e4-403f-aa0c-43ff60a840ea-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.927149 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b" (OuterVolumeSpecName: "kube-api-access-66c9b") pod "32c6181b-65e4-403f-aa0c-43ff60a840ea" (UID: "32c6181b-65e4-403f-aa0c-43ff60a840ea"). InnerVolumeSpecName "kube-api-access-66c9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.942883 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32c6181b-65e4-403f-aa0c-43ff60a840ea" (UID: "32c6181b-65e4-403f-aa0c-43ff60a840ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:32 crc kubenswrapper[4912]: I1208 21:43:32.963346 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data" (OuterVolumeSpecName: "config-data") pod "32c6181b-65e4-403f-aa0c-43ff60a840ea" (UID: "32c6181b-65e4-403f-aa0c-43ff60a840ea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.021185 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66c9b\" (UniqueName: \"kubernetes.io/projected/32c6181b-65e4-403f-aa0c-43ff60a840ea-kube-api-access-66c9b\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.021226 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.021238 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c6181b-65e4-403f-aa0c-43ff60a840ea-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.079731 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.079798 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.772140 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"263bb3b6-a184-4b85-9644-5c1f58345c7c","Type":"ContainerStarted","Data":"d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb"} Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.772193 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.795996 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.795969856 podStartE2EDuration="2.795969856s" podCreationTimestamp="2025-12-08 21:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:33.789976648 +0000 UTC m=+1495.652978731" watchObservedRunningTime="2025-12-08 21:43:33.795969856 +0000 UTC m=+1495.658971939" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.819612 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.828920 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.856129 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:33 crc kubenswrapper[4912]: E1208 21:43:33.856623 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-log" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.856652 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-log" Dec 08 21:43:33 crc kubenswrapper[4912]: E1208 21:43:33.856669 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-api" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.856680 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-api" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.856883 4912 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-log" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.856920 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" containerName="nova-api-api" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.858272 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.861676 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.884004 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.946884 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.947058 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.947363 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz8lc\" (UniqueName: \"kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:33 crc kubenswrapper[4912]: I1208 21:43:33.947498 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.049154 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.049834 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.050517 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz8lc\" (UniqueName: \"kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.050899 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs\") 
pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.051364 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.054475 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.055551 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.069018 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz8lc\" (UniqueName: \"kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc\") pod \"nova-api-0\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") " pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.203555 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.441366 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c6181b-65e4-403f-aa0c-43ff60a840ea" path="/var/lib/kubelet/pods/32c6181b-65e4-403f-aa0c-43ff60a840ea/volumes" Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.676100 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:43:34 crc kubenswrapper[4912]: W1208 21:43:34.678308 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7edd3ae2_599b_40d9_bdb7_52df11dfbb13.slice/crio-b59144d791e64b8767c07d3bbf7b8c484ce2ea5b5c0eefa7dc925f44f8339e2f WatchSource:0}: Error finding container b59144d791e64b8767c07d3bbf7b8c484ce2ea5b5c0eefa7dc925f44f8339e2f: Status 404 returned error can't find the container with id b59144d791e64b8767c07d3bbf7b8c484ce2ea5b5c0eefa7dc925f44f8339e2f Dec 08 21:43:34 crc kubenswrapper[4912]: I1208 21:43:34.779565 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerStarted","Data":"b59144d791e64b8767c07d3bbf7b8c484ce2ea5b5c0eefa7dc925f44f8339e2f"} Dec 08 21:43:35 crc kubenswrapper[4912]: I1208 21:43:35.791990 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerStarted","Data":"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"} Dec 08 21:43:35 crc kubenswrapper[4912]: I1208 21:43:35.792317 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerStarted","Data":"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"} Dec 08 21:43:35 crc kubenswrapper[4912]: I1208 21:43:35.818222 4912 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.818187643 podStartE2EDuration="2.818187643s" podCreationTimestamp="2025-12-08 21:43:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:35.811352775 +0000 UTC m=+1497.674354948" watchObservedRunningTime="2025-12-08 21:43:35.818187643 +0000 UTC m=+1497.681189766" Dec 08 21:43:36 crc kubenswrapper[4912]: I1208 21:43:36.193139 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 08 21:43:37 crc kubenswrapper[4912]: I1208 21:43:37.191945 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 08 21:43:38 crc kubenswrapper[4912]: I1208 21:43:38.080410 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:43:38 crc kubenswrapper[4912]: I1208 21:43:38.080463 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:43:39 crc kubenswrapper[4912]: I1208 21:43:39.096180 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:39 crc kubenswrapper[4912]: I1208 21:43:39.096232 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:42 crc kubenswrapper[4912]: I1208 21:43:42.191821 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:43:42 crc kubenswrapper[4912]: I1208 21:43:42.220272 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:43:42 crc kubenswrapper[4912]: I1208 21:43:42.889963 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 08 21:43:44 crc kubenswrapper[4912]: I1208 21:43:44.203700 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:43:44 crc kubenswrapper[4912]: I1208 21:43:44.203757 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:43:45 crc kubenswrapper[4912]: I1208 21:43:45.286303 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:45 crc kubenswrapper[4912]: I1208 21:43:45.286492 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 21:43:48 crc kubenswrapper[4912]: I1208 21:43:48.086388 4912 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:43:48 crc kubenswrapper[4912]: I1208 21:43:48.090450 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:43:48 crc kubenswrapper[4912]: I1208 21:43:48.091317 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:43:48 crc kubenswrapper[4912]: I1208 21:43:48.933467 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.861390 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.937682 4912 generic.go:334] "Generic (PLEG): container finished" podID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" containerID="b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6" exitCode=137 Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.937726 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.937784 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a47973b8-33a2-413a-bbaa-15d1ad677c6a","Type":"ContainerDied","Data":"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6"} Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.937808 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a47973b8-33a2-413a-bbaa-15d1ad677c6a","Type":"ContainerDied","Data":"f16a9effa1417eacd4d049da9e73b54530b9db3d5aa632adb000a9c215e690ce"} Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.937824 4912 scope.go:117] "RemoveContainer" containerID="b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.964252 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle\") pod \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.964593 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data\") pod \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.964845 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfmjj\" (UniqueName: \"kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj\") pod \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\" (UID: \"a47973b8-33a2-413a-bbaa-15d1ad677c6a\") " Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.978216 4912 scope.go:117] "RemoveContainer" containerID="b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.978662 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj" (OuterVolumeSpecName: "kube-api-access-vfmjj") 
pod "a47973b8-33a2-413a-bbaa-15d1ad677c6a" (UID: "a47973b8-33a2-413a-bbaa-15d1ad677c6a"). InnerVolumeSpecName "kube-api-access-vfmjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:43:49 crc kubenswrapper[4912]: E1208 21:43:49.988181 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6\": container with ID starting with b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6 not found: ID does not exist" containerID="b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6" Dec 08 21:43:49 crc kubenswrapper[4912]: I1208 21:43:49.988231 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6"} err="failed to get container status \"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6\": rpc error: code = NotFound desc = could not find container \"b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6\": container with ID starting with b0571d8fffbc7b24daf5a3ced7068b3093e9991cd9b593e127d81ec0789c13b6 not found: ID does not exist" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.008214 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a47973b8-33a2-413a-bbaa-15d1ad677c6a" (UID: "a47973b8-33a2-413a-bbaa-15d1ad677c6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.014235 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data" (OuterVolumeSpecName: "config-data") pod "a47973b8-33a2-413a-bbaa-15d1ad677c6a" (UID: "a47973b8-33a2-413a-bbaa-15d1ad677c6a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.067632 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.067898 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a47973b8-33a2-413a-bbaa-15d1ad677c6a-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.067970 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfmjj\" (UniqueName: \"kubernetes.io/projected/a47973b8-33a2-413a-bbaa-15d1ad677c6a-kube-api-access-vfmjj\") on node \"crc\" DevicePath \"\"" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.273300 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.284294 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.297625 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:50 crc kubenswrapper[4912]: E1208 21:43:50.298172 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.298200 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.298461 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" containerName="nova-cell1-novncproxy-novncproxy" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.299364 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.301495 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.301727 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.302478 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.315752 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.374059 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.374183 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.374234 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvl4r\" (UniqueName: \"kubernetes.io/projected/7298f5bf-63e0-446a-b351-bd2dea532216-kube-api-access-kvl4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.374315 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.374356 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.440101 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a47973b8-33a2-413a-bbaa-15d1ad677c6a" path="/var/lib/kubelet/pods/a47973b8-33a2-413a-bbaa-15d1ad677c6a/volumes" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.476396 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.476668 4912 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.476730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvl4r\" (UniqueName: \"kubernetes.io/projected/7298f5bf-63e0-446a-b351-bd2dea532216-kube-api-access-kvl4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.476795 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.476832 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.480603 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.481436 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.481742 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.483669 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7298f5bf-63e0-446a-b351-bd2dea532216-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.500474 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvl4r\" (UniqueName: \"kubernetes.io/projected/7298f5bf-63e0-446a-b351-bd2dea532216-kube-api-access-kvl4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"7298f5bf-63e0-446a-b351-bd2dea532216\") " pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:50 crc kubenswrapper[4912]: I1208 21:43:50.618612 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 08 21:43:51 crc kubenswrapper[4912]: I1208 21:43:51.131873 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 08 21:43:51 crc kubenswrapper[4912]: I1208 21:43:51.957319 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7298f5bf-63e0-446a-b351-bd2dea532216","Type":"ContainerStarted","Data":"da6313243816e4fe3c03ac22b6d191b0152002a1645a7736c9e8a9a30d354878"} Dec 08 21:43:51 crc kubenswrapper[4912]: I1208 21:43:51.957851 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7298f5bf-63e0-446a-b351-bd2dea532216","Type":"ContainerStarted","Data":"825cd1043eed67058fe699be6c2f204ed77f3a9a1de22d4a635d492087c8a9bc"} Dec 08 21:43:51 crc kubenswrapper[4912]: I1208 21:43:51.983876 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.983850109 podStartE2EDuration="1.983850109s" podCreationTimestamp="2025-12-08 21:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:51.976849358 +0000 UTC m=+1513.839851441" watchObservedRunningTime="2025-12-08 21:43:51.983850109 +0000 UTC m=+1513.846852192" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.207520 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.207846 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.208149 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.208196 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.210812 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.210962 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.405846 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-2nfps"] Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.409137 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.447090 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-2nfps"]
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450596 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450711 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2w4d\" (UniqueName: \"kubernetes.io/projected/16e236a9-1cad-40e9-8f00-1f8261e1c96a-kube-api-access-t2w4d\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450796 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-config\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450826 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450852 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.450899 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552439 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552810 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2w4d\" (UniqueName: \"kubernetes.io/projected/16e236a9-1cad-40e9-8f00-1f8261e1c96a-kube-api-access-t2w4d\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552865 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-config\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552890 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552924 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.552970 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.553790 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.553790 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-config\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.553881 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.554024 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.554203 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16e236a9-1cad-40e9-8f00-1f8261e1c96a-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.589324 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2w4d\" (UniqueName: \"kubernetes.io/projected/16e236a9-1cad-40e9-8f00-1f8261e1c96a-kube-api-access-t2w4d\") pod \"dnsmasq-dns-89c5cd4d5-2nfps\" (UID: \"16e236a9-1cad-40e9-8f00-1f8261e1c96a\") " pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:54 crc kubenswrapper[4912]: I1208 21:43:54.745643 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:55 crc kubenswrapper[4912]: I1208 21:43:55.457677 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-2nfps"]
Dec 08 21:43:55 crc kubenswrapper[4912]: W1208 21:43:55.472776 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16e236a9_1cad_40e9_8f00_1f8261e1c96a.slice/crio-326f3068209c0f58da40bb81103352aedf3d88a17665cdfd224d8ce1f8e17a04 WatchSource:0}: Error finding container 326f3068209c0f58da40bb81103352aedf3d88a17665cdfd224d8ce1f8e17a04: Status 404 returned error can't find the container with id 326f3068209c0f58da40bb81103352aedf3d88a17665cdfd224d8ce1f8e17a04
Dec 08 21:43:55 crc kubenswrapper[4912]: I1208 21:43:55.618685 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 08 21:43:56 crc kubenswrapper[4912]: I1208 21:43:56.021395 4912 generic.go:334] "Generic (PLEG): container finished" podID="16e236a9-1cad-40e9-8f00-1f8261e1c96a" containerID="49a1e9c3731c09cd1e65d4afff4dfbb464b72e038874cdbb744bfe0c30ab7e6e" exitCode=0
Dec 08 21:43:56 crc kubenswrapper[4912]: I1208 21:43:56.021501 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps" event={"ID":"16e236a9-1cad-40e9-8f00-1f8261e1c96a","Type":"ContainerDied","Data":"49a1e9c3731c09cd1e65d4afff4dfbb464b72e038874cdbb744bfe0c30ab7e6e"}
Dec 08 21:43:56 crc kubenswrapper[4912]: I1208 21:43:56.021553 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps" event={"ID":"16e236a9-1cad-40e9-8f00-1f8261e1c96a","Type":"ContainerStarted","Data":"326f3068209c0f58da40bb81103352aedf3d88a17665cdfd224d8ce1f8e17a04"}
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.037495 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps" event={"ID":"16e236a9-1cad-40e9-8f00-1f8261e1c96a","Type":"ContainerStarted","Data":"337e293cd9acd7a8664eccaf80bdcbaf907beddcb03a76ac2aaecf0ca5001502"}
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.046616 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.063017 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.063418 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-log" containerID="cri-o://ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b" gracePeriod=30
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.063640 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-api" containerID="cri-o://91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0" gracePeriod=30
Dec 08 21:43:57 crc kubenswrapper[4912]: I1208 21:43:57.078442 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps" podStartSLOduration=3.078416961 podStartE2EDuration="3.078416961s" podCreationTimestamp="2025-12-08 21:43:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:43:57.074989342 +0000 UTC m=+1518.937991425" watchObservedRunningTime="2025-12-08 21:43:57.078416961 +0000 UTC m=+1518.941419044"
Dec 08 21:43:58 crc kubenswrapper[4912]: I1208 21:43:58.051172 4912 generic.go:334] "Generic (PLEG): container finished" podID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerID="ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b" exitCode=143
Dec 08 21:43:58 crc kubenswrapper[4912]: I1208 21:43:58.051267 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerDied","Data":"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"}
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.619030 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.648369 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.664929 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.721875 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle\") pod \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") "
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.724412 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs\") pod \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") "
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.724997 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs" (OuterVolumeSpecName: "logs") pod "7edd3ae2-599b-40d9-bdb7-52df11dfbb13" (UID: "7edd3ae2-599b-40d9-bdb7-52df11dfbb13"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.725243 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz8lc\" (UniqueName: \"kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc\") pod \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") "
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.725410 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data\") pod \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\" (UID: \"7edd3ae2-599b-40d9-bdb7-52df11dfbb13\") "
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.729104 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.733883 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc" (OuterVolumeSpecName: "kube-api-access-qz8lc") pod "7edd3ae2-599b-40d9-bdb7-52df11dfbb13" (UID: "7edd3ae2-599b-40d9-bdb7-52df11dfbb13"). InnerVolumeSpecName "kube-api-access-qz8lc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.757470 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7edd3ae2-599b-40d9-bdb7-52df11dfbb13" (UID: "7edd3ae2-599b-40d9-bdb7-52df11dfbb13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.768240 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data" (OuterVolumeSpecName: "config-data") pod "7edd3ae2-599b-40d9-bdb7-52df11dfbb13" (UID: "7edd3ae2-599b-40d9-bdb7-52df11dfbb13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.830623 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz8lc\" (UniqueName: \"kubernetes.io/projected/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-kube-api-access-qz8lc\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.830666 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:00 crc kubenswrapper[4912]: I1208 21:44:00.830677 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edd3ae2-599b-40d9-bdb7-52df11dfbb13-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.080192 4912 generic.go:334] "Generic (PLEG): container finished" podID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerID="91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0" exitCode=0
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.082178 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.089514 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerDied","Data":"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"}
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.089596 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7edd3ae2-599b-40d9-bdb7-52df11dfbb13","Type":"ContainerDied","Data":"b59144d791e64b8767c07d3bbf7b8c484ce2ea5b5c0eefa7dc925f44f8339e2f"}
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.089624 4912 scope.go:117] "RemoveContainer" containerID="91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.149047 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.165178 4912 scope.go:117] "RemoveContainer" containerID="ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.168211 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.183859 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.198258 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:01 crc kubenswrapper[4912]: E1208 21:44:01.198825 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-api"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.198845 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-api"
Dec 08 21:44:01 crc kubenswrapper[4912]: E1208 21:44:01.198890 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-log"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.198898 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-log"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.199185 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-log"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.199206 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" containerName="nova-api-api"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.200443 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.204387 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.204557 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.204608 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.211131 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.263163 4912 scope.go:117] "RemoveContainer" containerID="91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"
Dec 08 21:44:01 crc kubenswrapper[4912]: E1208 21:44:01.264426 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0\": container with ID starting with 91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0 not found: ID does not exist" containerID="91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.264545 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0"} err="failed to get container status \"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0\": rpc error: code = NotFound desc = could not find container \"91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0\": container with ID starting with 91b82bfed8f90784a7a526c3742826748a8903e4942b04f6ea0177f0c20de4d0 not found: ID does not exist"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.264635 4912 scope.go:117] "RemoveContainer" containerID="ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"
Dec 08 21:44:01 crc kubenswrapper[4912]: E1208 21:44:01.265534 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b\": container with ID starting with ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b not found: ID does not exist" containerID="ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.265577 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b"} err="failed to get container status \"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b\": rpc error: code = NotFound desc = could not find container \"ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b\": container with ID starting with ca804bb969f88bbc28053d56c623387e300b9a6bba041a9ebc8316f3037bf49b not found: ID does not exist"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.346986 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.347087 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.347403 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.347530 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.347574 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g8lk\" (UniqueName: \"kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.347894 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.377103 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjjp"]
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.378436 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.382545 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.382740 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.389969 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjjp"]
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.449864 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.449936 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.449982 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450004 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g8lk\" (UniqueName: \"kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450047 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450076 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvnck\" (UniqueName: \"kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450122 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450151 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450171 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.450199 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.454560 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.457637 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.457717 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.457943 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.469804 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.472443 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g8lk\" (UniqueName: \"kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk\") pod \"nova-api-0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") " pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.534954 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.551990 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.552140 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.552252 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.552291 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvnck\" (UniqueName: \"kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.557079 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.559501 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.563871 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.572735 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvnck\" (UniqueName: \"kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck\") pod \"nova-cell1-cell-mapping-qbjjp\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") " pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:01 crc kubenswrapper[4912]: I1208 21:44:01.721094 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:02 crc kubenswrapper[4912]: I1208 21:44:02.312711 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:02 crc kubenswrapper[4912]: I1208 21:44:02.442725 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7edd3ae2-599b-40d9-bdb7-52df11dfbb13" path="/var/lib/kubelet/pods/7edd3ae2-599b-40d9-bdb7-52df11dfbb13/volumes"
Dec 08 21:44:02 crc kubenswrapper[4912]: I1208 21:44:02.491754 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjjp"]
Dec 08 21:44:02 crc kubenswrapper[4912]: W1208 21:44:02.492414 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb72a504_41c4_43a7_8875_32807e8d27c0.slice/crio-407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622 WatchSource:0}: Error finding container 407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622: Status 404 returned error can't find the container with id 407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.132885 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjjp" event={"ID":"eb72a504-41c4-43a7-8875-32807e8d27c0","Type":"ContainerStarted","Data":"2029e01ae50661e9fe1df3a0fb3189a6f138c82f4dd4015664c6289ced89aa0c"}
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.133844 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjjp" event={"ID":"eb72a504-41c4-43a7-8875-32807e8d27c0","Type":"ContainerStarted","Data":"407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622"}
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.135199 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerStarted","Data":"520543a455bb9e8e7b83c52a7574466afde012e8173cba4d473a67a848a4983c"}
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.135222 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerStarted","Data":"33d87f32e192458ad88b1e67344bd04b19aa9ec44c069e04e5fb7320133e4b0c"}
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.135231 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerStarted","Data":"ab110dc156dfcf2486f972d7322bf5161a6bbf3061e82007cd6c2ccfb6b01bc1"}
Dec 08 21:44:03 crc kubenswrapper[4912]: I1208 21:44:03.156668 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-qbjjp" podStartSLOduration=2.156649396 podStartE2EDuration="2.156649396s" podCreationTimestamp="2025-12-08 21:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:03.153189346 +0000 UTC m=+1525.016191429" watchObservedRunningTime="2025-12-08 21:44:03.156649396 +0000 UTC m=+1525.019651469"
Dec 08 21:44:04 crc kubenswrapper[4912]: I1208 21:44:04.748076 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-2nfps"
Dec 08 21:44:04 crc kubenswrapper[4912]: I1208 21:44:04.774895 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.774872549 podStartE2EDuration="3.774872549s" podCreationTimestamp="2025-12-08 21:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:03.177492707 +0000 UTC m=+1525.040494790" watchObservedRunningTime="2025-12-08 21:44:04.774872549 +0000 UTC m=+1526.637874632"
Dec 08 21:44:04 crc kubenswrapper[4912]: I1208 21:44:04.814171 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"]
Dec 08 21:44:04 crc kubenswrapper[4912]: I1208 21:44:04.814790 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="dnsmasq-dns" containerID="cri-o://7f6fbba03343d4a80ff37a437561d4c7c555fbc53ff7f8d9ded26e868803eab9" gracePeriod=10
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.177794 4912 generic.go:334] "Generic (PLEG): container finished" podID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerID="7f6fbba03343d4a80ff37a437561d4c7c555fbc53ff7f8d9ded26e868803eab9" exitCode=0
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.178413 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" event={"ID":"5765ec98-7f46-44c4-81bf-506b054e8e06","Type":"ContainerDied","Data":"7f6fbba03343d4a80ff37a437561d4c7c555fbc53ff7f8d9ded26e868803eab9"}
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.566422 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb"
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.684441 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.684884 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd5qd\" (UniqueName: \"kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.684969 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.685006 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.685069 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.685150 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.692213 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd" (OuterVolumeSpecName: "kube-api-access-sd5qd") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "kube-api-access-sd5qd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.738192 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.753474 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.765163 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.787675 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config" (OuterVolumeSpecName: "config") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.788615 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") pod \"5765ec98-7f46-44c4-81bf-506b054e8e06\" (UID: \"5765ec98-7f46-44c4-81bf-506b054e8e06\") "
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.789739 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: W1208 21:44:05.789928 4912 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/5765ec98-7f46-44c4-81bf-506b054e8e06/volumes/kubernetes.io~configmap/config
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.789943 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config" (OuterVolumeSpecName: "config") pod "5765ec98-7f46-44c4-81bf-506b054e8e06" (UID: "5765ec98-7f46-44c4-81bf-506b054e8e06"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.798920 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.798977 4912 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.798992 4912 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-config\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.799009 4912 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.799019 4912 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5765ec98-7f46-44c4-81bf-506b054e8e06-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:05 crc kubenswrapper[4912]: I1208 21:44:05.799034 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd5qd\" (UniqueName: \"kubernetes.io/projected/5765ec98-7f46-44c4-81bf-506b054e8e06-kube-api-access-sd5qd\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.198164 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" event={"ID":"5765ec98-7f46-44c4-81bf-506b054e8e06","Type":"ContainerDied","Data":"d2eb7adf28faad44da763686c7bb72224d2ab39fcc5f1898d3892db132d40e88"}
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.198240 4912 scope.go:117] "RemoveContainer" containerID="7f6fbba03343d4a80ff37a437561d4c7c555fbc53ff7f8d9ded26e868803eab9"
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.198454 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb"
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.219982 4912 scope.go:117] "RemoveContainer" containerID="ed4913153b72688ccb802611853dabd081880b2318419893fff261a4c4a34a90"
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.237846 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"]
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.252671 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-8jzfb"]
Dec 08 21:44:06 crc kubenswrapper[4912]: I1208 21:44:06.445281 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" path="/var/lib/kubelet/pods/5765ec98-7f46-44c4-81bf-506b054e8e06/volumes"
Dec 08 21:44:09 crc kubenswrapper[4912]: I1208 21:44:09.229629 4912 generic.go:334] "Generic (PLEG): container finished" podID="eb72a504-41c4-43a7-8875-32807e8d27c0" containerID="2029e01ae50661e9fe1df3a0fb3189a6f138c82f4dd4015664c6289ced89aa0c" exitCode=0
Dec 08 21:44:09 crc kubenswrapper[4912]: I1208 21:44:09.229732 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjjp" event={"ID":"eb72a504-41c4-43a7-8875-32807e8d27c0","Type":"ContainerDied","Data":"2029e01ae50661e9fe1df3a0fb3189a6f138c82f4dd4015664c6289ced89aa0c"}
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.119581 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757b4f8459-8jzfb" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.183:5353: i/o timeout"
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.693386 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.800715 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data\") pod \"eb72a504-41c4-43a7-8875-32807e8d27c0\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") "
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.801124 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle\") pod \"eb72a504-41c4-43a7-8875-32807e8d27c0\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") "
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.801320 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvnck\" (UniqueName: \"kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck\") pod \"eb72a504-41c4-43a7-8875-32807e8d27c0\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") "
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.801575 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts\") pod \"eb72a504-41c4-43a7-8875-32807e8d27c0\" (UID: \"eb72a504-41c4-43a7-8875-32807e8d27c0\") "
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.808143 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck" (OuterVolumeSpecName: "kube-api-access-kvnck") pod "eb72a504-41c4-43a7-8875-32807e8d27c0" (UID: "eb72a504-41c4-43a7-8875-32807e8d27c0"). InnerVolumeSpecName "kube-api-access-kvnck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.809718 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts" (OuterVolumeSpecName: "scripts") pod "eb72a504-41c4-43a7-8875-32807e8d27c0" (UID: "eb72a504-41c4-43a7-8875-32807e8d27c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.831538 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data" (OuterVolumeSpecName: "config-data") pod "eb72a504-41c4-43a7-8875-32807e8d27c0" (UID: "eb72a504-41c4-43a7-8875-32807e8d27c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.832277 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb72a504-41c4-43a7-8875-32807e8d27c0" (UID: "eb72a504-41c4-43a7-8875-32807e8d27c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.904338 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvnck\" (UniqueName: \"kubernetes.io/projected/eb72a504-41c4-43a7-8875-32807e8d27c0-kube-api-access-kvnck\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.904373 4912 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-scripts\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.904384 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:10 crc kubenswrapper[4912]: I1208 21:44:10.904395 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb72a504-41c4-43a7-8875-32807e8d27c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.278977 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjjp" event={"ID":"eb72a504-41c4-43a7-8875-32807e8d27c0","Type":"ContainerDied","Data":"407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622"}
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.279687 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjjp"
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.280274 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="407fa3dccf3326d2cbbf1b0ac5a139e7ab0b0bd63ce7d877d3c34f48ff5e2622"
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.449664 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.449911 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-log" containerID="cri-o://33d87f32e192458ad88b1e67344bd04b19aa9ec44c069e04e5fb7320133e4b0c" gracePeriod=30
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.449972 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-api" containerID="cri-o://520543a455bb9e8e7b83c52a7574466afde012e8173cba4d473a67a848a4983c" gracePeriod=30
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.553692 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.554181 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerName="nova-scheduler-scheduler" containerID="cri-o://d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" gracePeriod=30
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.588473 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.588772 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" containerID="cri-o://f5dd6596c110ad15fd3aa72c137b9705b30a0b7b776b53eb4b80da86a8878294" gracePeriod=30
Dec 08 21:44:11 crc kubenswrapper[4912]: I1208 21:44:11.589163 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" containerID="cri-o://83f84b72a7994af913da80aca452e88edd4dba2a5a140c106ea83048229260d4" gracePeriod=30
Dec 08 21:44:12 crc kubenswrapper[4912]: E1208 21:44:12.193517 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:44:12 crc kubenswrapper[4912]: E1208 21:44:12.195104 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:44:12 crc kubenswrapper[4912]: E1208 21:44:12.196687 4912 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 08 21:44:12 crc kubenswrapper[4912]: E1208 21:44:12.196741 4912 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerName="nova-scheduler-scheduler"
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.295494 4912 generic.go:334] "Generic (PLEG): container finished" podID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerID="520543a455bb9e8e7b83c52a7574466afde012e8173cba4d473a67a848a4983c" exitCode=0
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.295520 4912 generic.go:334] "Generic (PLEG): container finished" podID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerID="33d87f32e192458ad88b1e67344bd04b19aa9ec44c069e04e5fb7320133e4b0c" exitCode=143
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.295558 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerDied","Data":"520543a455bb9e8e7b83c52a7574466afde012e8173cba4d473a67a848a4983c"}
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.295585 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerDied","Data":"33d87f32e192458ad88b1e67344bd04b19aa9ec44c069e04e5fb7320133e4b0c"}
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.298248 4912 generic.go:334] "Generic (PLEG): container finished" podID="e972bd39-06fa-4561-ac32-072551d4da6e" containerID="f5dd6596c110ad15fd3aa72c137b9705b30a0b7b776b53eb4b80da86a8878294" exitCode=143
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.298276 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerDied","Data":"f5dd6596c110ad15fd3aa72c137b9705b30a0b7b776b53eb4b80da86a8878294"}
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.593244 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746232 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746401 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g8lk\" (UniqueName: \"kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746462 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746515 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746540 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.746650 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data\") pod \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\" (UID: \"8c3f43b2-7c72-4991-adb2-17f74320dcd0\") "
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.747345 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs" (OuterVolumeSpecName: "logs") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.755919 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk" (OuterVolumeSpecName: "kube-api-access-5g8lk") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "kube-api-access-5g8lk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.774894 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data" (OuterVolumeSpecName: "config-data") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.776906 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.811478 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.819327 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8c3f43b2-7c72-4991-adb2-17f74320dcd0" (UID: "8c3f43b2-7c72-4991-adb2-17f74320dcd0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848457 4912 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848494 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-config-data\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848503 4912 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848513 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g8lk\" (UniqueName: \"kubernetes.io/projected/8c3f43b2-7c72-4991-adb2-17f74320dcd0-kube-api-access-5g8lk\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848524 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3f43b2-7c72-4991-adb2-17f74320dcd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:12 crc kubenswrapper[4912]: I1208 21:44:12.848534 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3f43b2-7c72-4991-adb2-17f74320dcd0-logs\") on node \"crc\" DevicePath \"\""
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.307990 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3f43b2-7c72-4991-adb2-17f74320dcd0","Type":"ContainerDied","Data":"ab110dc156dfcf2486f972d7322bf5161a6bbf3061e82007cd6c2ccfb6b01bc1"}
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.308061 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.308368 4912 scope.go:117] "RemoveContainer" containerID="520543a455bb9e8e7b83c52a7574466afde012e8173cba4d473a67a848a4983c"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.335881 4912 scope.go:117] "RemoveContainer" containerID="33d87f32e192458ad88b1e67344bd04b19aa9ec44c069e04e5fb7320133e4b0c"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.343986 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.362303 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372022 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 08 21:44:13 crc kubenswrapper[4912]: E1208 21:44:13.372579 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb72a504-41c4-43a7-8875-32807e8d27c0" containerName="nova-manage"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372606 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb72a504-41c4-43a7-8875-32807e8d27c0" containerName="nova-manage"
Dec 08 21:44:13 crc kubenswrapper[4912]: E1208 21:44:13.372632 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="init"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372641 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="init"
Dec 08 21:44:13 crc kubenswrapper[4912]: E1208 21:44:13.372661 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-log"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372669 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-log"
Dec 08 21:44:13 crc kubenswrapper[4912]: E1208 21:44:13.372687 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-api"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372694 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-api"
Dec 08 21:44:13 crc kubenswrapper[4912]: E1208 21:44:13.372717 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="dnsmasq-dns"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372726 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="dnsmasq-dns"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372932 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb72a504-41c4-43a7-8875-32807e8d27c0" containerName="nova-manage"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372968 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-log"
Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372980 4912
memory_manager.go:354] "RemoveStaleState removing state" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" containerName="nova-api-api" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.372995 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="5765ec98-7f46-44c4-81bf-506b054e8e06" containerName="dnsmasq-dns" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.374189 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.376363 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.377060 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.381698 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.383462 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.459905 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-logs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.459994 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-public-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.460065 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkc9j\" (UniqueName: \"kubernetes.io/projected/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-kube-api-access-mkc9j\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.460167 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-config-data\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.460205 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.460289 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.561825 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.562350 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-logs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.562447 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-public-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.562540 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkc9j\" (UniqueName: \"kubernetes.io/projected/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-kube-api-access-mkc9j\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.563092 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-config-data\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.563218 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.563109 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-logs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.568855 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.568874 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-public-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.574803 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.575540 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-config-data\") pod 
\"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.580400 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkc9j\" (UniqueName: \"kubernetes.io/projected/9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3-kube-api-access-mkc9j\") pod \"nova-api-0\" (UID: \"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3\") " pod="openstack/nova-api-0" Dec 08 21:44:13 crc kubenswrapper[4912]: I1208 21:44:13.694860 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 08 21:44:14 crc kubenswrapper[4912]: I1208 21:44:14.133599 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 08 21:44:14 crc kubenswrapper[4912]: W1208 21:44:14.145294 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9adc2e28_5a3b_4cc7_9ea6_c99f2c6a97d3.slice/crio-41c6de070ce3b295ebbf77f361eab5e562c0cf072b8ae6ecd119a1717a1a7117 WatchSource:0}: Error finding container 41c6de070ce3b295ebbf77f361eab5e562c0cf072b8ae6ecd119a1717a1a7117: Status 404 returned error can't find the container with id 41c6de070ce3b295ebbf77f361eab5e562c0cf072b8ae6ecd119a1717a1a7117 Dec 08 21:44:14 crc kubenswrapper[4912]: I1208 21:44:14.321699 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3","Type":"ContainerStarted","Data":"41c6de070ce3b295ebbf77f361eab5e562c0cf072b8ae6ecd119a1717a1a7117"} Dec 08 21:44:14 crc kubenswrapper[4912]: I1208 21:44:14.439159 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c3f43b2-7c72-4991-adb2-17f74320dcd0" path="/var/lib/kubelet/pods/8c3f43b2-7c72-4991-adb2-17f74320dcd0/volumes" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.258338 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": read tcp 10.217.0.2:55680->10.217.0.187:8775: read: connection reset by peer" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.258378 4912 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": read tcp 10.217.0.2:55682->10.217.0.187:8775: read: connection reset by peer" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.336972 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3","Type":"ContainerStarted","Data":"d3eb4814c5defea00b7b36ab3b2f08eb5e2cda2a3709ec1a3d4c1a844b758427"} Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.337015 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3","Type":"ContainerStarted","Data":"d1cad8bb07d40484d808e33c2805d937b7a515f8c700f453de441bd243701168"} Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.341493 4912 generic.go:334] "Generic (PLEG): container finished" podID="e972bd39-06fa-4561-ac32-072551d4da6e" containerID="83f84b72a7994af913da80aca452e88edd4dba2a5a140c106ea83048229260d4" exitCode=0 Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.341537 4912 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerDied","Data":"83f84b72a7994af913da80aca452e88edd4dba2a5a140c106ea83048229260d4"} Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.364727 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.364706307 podStartE2EDuration="2.364706307s" podCreationTimestamp="2025-12-08 21:44:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:15.35444108 +0000 UTC m=+1537.217443173" watchObservedRunningTime="2025-12-08 21:44:15.364706307 +0000 UTC m=+1537.227708390" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.695882 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.807215 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tjc7\" (UniqueName: \"kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7\") pod \"e972bd39-06fa-4561-ac32-072551d4da6e\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.807348 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs\") pod \"e972bd39-06fa-4561-ac32-072551d4da6e\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.807393 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs\") pod \"e972bd39-06fa-4561-ac32-072551d4da6e\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.807424 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data\") pod \"e972bd39-06fa-4561-ac32-072551d4da6e\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.807548 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle\") pod \"e972bd39-06fa-4561-ac32-072551d4da6e\" (UID: \"e972bd39-06fa-4561-ac32-072551d4da6e\") " Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.808870 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs" (OuterVolumeSpecName: "logs") pod "e972bd39-06fa-4561-ac32-072551d4da6e" (UID: "e972bd39-06fa-4561-ac32-072551d4da6e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.815122 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7" (OuterVolumeSpecName: "kube-api-access-5tjc7") pod "e972bd39-06fa-4561-ac32-072551d4da6e" (UID: "e972bd39-06fa-4561-ac32-072551d4da6e"). InnerVolumeSpecName "kube-api-access-5tjc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.857669 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data" (OuterVolumeSpecName: "config-data") pod "e972bd39-06fa-4561-ac32-072551d4da6e" (UID: "e972bd39-06fa-4561-ac32-072551d4da6e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.858441 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e972bd39-06fa-4561-ac32-072551d4da6e" (UID: "e972bd39-06fa-4561-ac32-072551d4da6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.889192 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e972bd39-06fa-4561-ac32-072551d4da6e" (UID: "e972bd39-06fa-4561-ac32-072551d4da6e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.910827 4912 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.910869 4912 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e972bd39-06fa-4561-ac32-072551d4da6e-logs\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.910881 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.910894 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e972bd39-06fa-4561-ac32-072551d4da6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:15 crc kubenswrapper[4912]: I1208 21:44:15.910905 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tjc7\" (UniqueName: \"kubernetes.io/projected/e972bd39-06fa-4561-ac32-072551d4da6e-kube-api-access-5tjc7\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.350528 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e972bd39-06fa-4561-ac32-072551d4da6e","Type":"ContainerDied","Data":"52233b38dca8eb83f6b0fc2b678a496f2a581890bb61bec260731c13dc48ef37"} Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.350837 4912 scope.go:117] "RemoveContainer" containerID="83f84b72a7994af913da80aca452e88edd4dba2a5a140c106ea83048229260d4" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.350960 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.354244 4912 generic.go:334] "Generic (PLEG): container finished" podID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerID="d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" exitCode=0 Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.354358 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"263bb3b6-a184-4b85-9644-5c1f58345c7c","Type":"ContainerDied","Data":"d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb"} Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.384266 4912 scope.go:117] "RemoveContainer" containerID="f5dd6596c110ad15fd3aa72c137b9705b30a0b7b776b53eb4b80da86a8878294" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.395341 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.421543 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.458872 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" path="/var/lib/kubelet/pods/e972bd39-06fa-4561-ac32-072551d4da6e/volumes" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.459684 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:16 crc kubenswrapper[4912]: E1208 21:44:16.459980 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.459996 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" Dec 08 21:44:16 crc kubenswrapper[4912]: E1208 21:44:16.460023 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.460029 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.460280 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-metadata" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.460310 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="e972bd39-06fa-4561-ac32-072551d4da6e" containerName="nova-metadata-log" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.461652 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.461743 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.462524 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.463567 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.464123 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.522359 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data\") pod \"263bb3b6-a184-4b85-9644-5c1f58345c7c\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.522401 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle\") pod \"263bb3b6-a184-4b85-9644-5c1f58345c7c\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.522553 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct2tt\" (UniqueName: \"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt\") pod \"263bb3b6-a184-4b85-9644-5c1f58345c7c\" (UID: \"263bb3b6-a184-4b85-9644-5c1f58345c7c\") " Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.522976 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c094983f-9b35-4488-9d83-215a52b906a4-logs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.523164 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.523300 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-config-data\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.523320 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.523467 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9485\" (UniqueName: \"kubernetes.io/projected/c094983f-9b35-4488-9d83-215a52b906a4-kube-api-access-l9485\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.528767 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt" (OuterVolumeSpecName: "kube-api-access-ct2tt") pod "263bb3b6-a184-4b85-9644-5c1f58345c7c" (UID: "263bb3b6-a184-4b85-9644-5c1f58345c7c"). InnerVolumeSpecName "kube-api-access-ct2tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.550120 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data" (OuterVolumeSpecName: "config-data") pod "263bb3b6-a184-4b85-9644-5c1f58345c7c" (UID: "263bb3b6-a184-4b85-9644-5c1f58345c7c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.561795 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "263bb3b6-a184-4b85-9644-5c1f58345c7c" (UID: "263bb3b6-a184-4b85-9644-5c1f58345c7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625395 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625502 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-config-data\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625527 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625600 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9485\" (UniqueName: \"kubernetes.io/projected/c094983f-9b35-4488-9d83-215a52b906a4-kube-api-access-l9485\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625689 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c094983f-9b35-4488-9d83-215a52b906a4-logs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625759 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625776 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263bb3b6-a184-4b85-9644-5c1f58345c7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 
08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.625791 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct2tt\" (UniqueName: \"kubernetes.io/projected/263bb3b6-a184-4b85-9644-5c1f58345c7c-kube-api-access-ct2tt\") on node \"crc\" DevicePath \"\"" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.626276 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c094983f-9b35-4488-9d83-215a52b906a4-logs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.629455 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.630677 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-config-data\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.630919 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c094983f-9b35-4488-9d83-215a52b906a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.650416 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9485\" (UniqueName: \"kubernetes.io/projected/c094983f-9b35-4488-9d83-215a52b906a4-kube-api-access-l9485\") pod \"nova-metadata-0\" (UID: \"c094983f-9b35-4488-9d83-215a52b906a4\") " pod="openstack/nova-metadata-0" Dec 08 21:44:16 crc kubenswrapper[4912]: I1208 21:44:16.783783 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.285020 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.389883 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.389843 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"263bb3b6-a184-4b85-9644-5c1f58345c7c","Type":"ContainerDied","Data":"294a5f8454bef7948581153b56dda146d7efe24f5d5aff5577b8519d4429289e"} Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.389987 4912 scope.go:117] "RemoveContainer" containerID="d874680d24526330130328a54f7743a15e4f5a88fede36f385da34d6f13daceb" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.391610 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c094983f-9b35-4488-9d83-215a52b906a4","Type":"ContainerStarted","Data":"1876e9c20653cfc25888183076aff4904a14e47835c7a79dafb09bc7c81a0b22"} Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.466529 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.482233 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.494154 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:17 crc kubenswrapper[4912]: E1208 21:44:17.494689 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerName="nova-scheduler-scheduler" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.494711 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerName="nova-scheduler-scheduler" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.494883 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" containerName="nova-scheduler-scheduler" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.495725 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.498888 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.507945 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.548794 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-config-data\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.548850 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.548902 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk6k6\" (UniqueName: \"kubernetes.io/projected/f0790114-108f-4b92-915e-807fd4a7e0aa-kube-api-access-zk6k6\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.651297 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-config-data\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.651366 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.651417 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk6k6\" (UniqueName: \"kubernetes.io/projected/f0790114-108f-4b92-915e-807fd4a7e0aa-kube-api-access-zk6k6\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.656672 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-config-data\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.664698 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0790114-108f-4b92-915e-807fd4a7e0aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.672331 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk6k6\" (UniqueName: 
\"kubernetes.io/projected/f0790114-108f-4b92-915e-807fd4a7e0aa-kube-api-access-zk6k6\") pod \"nova-scheduler-0\" (UID: \"f0790114-108f-4b92-915e-807fd4a7e0aa\") " pod="openstack/nova-scheduler-0" Dec 08 21:44:17 crc kubenswrapper[4912]: I1208 21:44:17.823271 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.262202 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.406750 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0790114-108f-4b92-915e-807fd4a7e0aa","Type":"ContainerStarted","Data":"d7a251f16263547d33940cd37c8c46408cf3d70264235a4c3a81e30d9aa3d54e"} Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.409890 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c094983f-9b35-4488-9d83-215a52b906a4","Type":"ContainerStarted","Data":"5cc820e7203e8b91f28ff8143f56d5450f140d806a2a2f09e08d02c039fd7ccc"} Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.409913 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c094983f-9b35-4488-9d83-215a52b906a4","Type":"ContainerStarted","Data":"2c2b618ba1a719652b4ea2f37acde03bbe9762d392eae4495cedd9d1faaf15af"} Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.442988 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.442926878 podStartE2EDuration="2.442926878s" podCreationTimestamp="2025-12-08 21:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:18.42744657 +0000 UTC m=+1540.290448673" watchObservedRunningTime="2025-12-08 21:44:18.442926878 +0000 UTC m=+1540.305928961" Dec 08 21:44:18 crc kubenswrapper[4912]: I1208 21:44:18.448677 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="263bb3b6-a184-4b85-9644-5c1f58345c7c" path="/var/lib/kubelet/pods/263bb3b6-a184-4b85-9644-5c1f58345c7c/volumes" Dec 08 21:44:19 crc kubenswrapper[4912]: I1208 21:44:19.425248 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0790114-108f-4b92-915e-807fd4a7e0aa","Type":"ContainerStarted","Data":"403109d39f95a738e4a38f5ea431f94f0b122e9e953ea589d3b1c61358685e2d"} Dec 08 21:44:19 crc kubenswrapper[4912]: I1208 21:44:19.452407 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.452382958 podStartE2EDuration="2.452382958s" podCreationTimestamp="2025-12-08 21:44:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 21:44:19.443713208 +0000 UTC m=+1541.306715291" watchObservedRunningTime="2025-12-08 21:44:19.452382958 +0000 UTC m=+1541.315385051" Dec 08 21:44:21 crc kubenswrapper[4912]: I1208 21:44:21.784281 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:44:21 crc kubenswrapper[4912]: I1208 21:44:21.784864 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 08 21:44:22 crc kubenswrapper[4912]: I1208 21:44:22.823970 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Dec 08 21:44:23 crc kubenswrapper[4912]: I1208 21:44:23.695989 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:44:23 crc kubenswrapper[4912]: I1208 21:44:23.696244 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 08 21:44:24 crc kubenswrapper[4912]: I1208 21:44:24.711221 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.195:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:44:24 crc kubenswrapper[4912]: I1208 21:44:24.711221 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.195:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:44:26 crc kubenswrapper[4912]: I1208 21:44:26.785063 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:44:26 crc kubenswrapper[4912]: I1208 21:44:26.785721 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 08 21:44:27 crc kubenswrapper[4912]: I1208 21:44:27.802287 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c094983f-9b35-4488-9d83-215a52b906a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:44:27 crc kubenswrapper[4912]: I1208 21:44:27.802284 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c094983f-9b35-4488-9d83-215a52b906a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 21:44:27 crc kubenswrapper[4912]: I1208 21:44:27.824302 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 08 21:44:27 crc kubenswrapper[4912]: I1208 21:44:27.858968 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 08 21:44:28 crc kubenswrapper[4912]: I1208 21:44:28.540085 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 08 21:44:33 crc kubenswrapper[4912]: I1208 21:44:33.704995 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:44:33 crc kubenswrapper[4912]: I1208 21:44:33.706958 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:44:33 crc kubenswrapper[4912]: I1208 21:44:33.713505 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 08 21:44:33 crc kubenswrapper[4912]: I1208 21:44:33.714647 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:44:34 crc kubenswrapper[4912]: I1208 21:44:34.591470 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 08 21:44:34 crc kubenswrapper[4912]: I1208 
21:44:34.599142 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 08 21:44:36 crc kubenswrapper[4912]: I1208 21:44:36.792047 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:44:36 crc kubenswrapper[4912]: I1208 21:44:36.799936 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 08 21:44:36 crc kubenswrapper[4912]: I1208 21:44:36.800656 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:44:37 crc kubenswrapper[4912]: I1208 21:44:37.627991 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.208877 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.211529 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.235253 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.349727 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.349923 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.350007 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvmwg\" (UniqueName: \"kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.452097 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.452505 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.452773 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.453885 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.454450 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvmwg\" (UniqueName: \"kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.473965 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvmwg\" (UniqueName: \"kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg\") pod \"certified-operators-rmqkp\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:49 crc kubenswrapper[4912]: I1208 21:44:49.566505 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:50 crc kubenswrapper[4912]: I1208 21:44:50.132236 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 21:44:50 crc kubenswrapper[4912]: I1208 21:44:50.765193 4912 generic.go:334] "Generic (PLEG): container finished" podID="744551ec-9627-4b91-a16a-ed982eaacdae" containerID="5d0741c442423d00fdd3e32383975287ad8d8b2e2d1d2effaaa0823ba7355c73" exitCode=0 Dec 08 21:44:50 crc kubenswrapper[4912]: I1208 21:44:50.765328 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerDied","Data":"5d0741c442423d00fdd3e32383975287ad8d8b2e2d1d2effaaa0823ba7355c73"} Dec 08 21:44:50 crc kubenswrapper[4912]: I1208 21:44:50.765511 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerStarted","Data":"9ad123d422ad5e601d3ac5dbfbf88343ec2af17b1b2107541bf253a031ff78f4"} Dec 08 21:44:56 crc kubenswrapper[4912]: I1208 21:44:56.827470 4912 generic.go:334] "Generic (PLEG): container finished" podID="744551ec-9627-4b91-a16a-ed982eaacdae" containerID="68c8e39157c973cc2bef1c82c1cbfa3236a2d67a972f510fe81c92a48d966f8a" exitCode=0 Dec 08 21:44:56 crc kubenswrapper[4912]: I1208 21:44:56.827542 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerDied","Data":"68c8e39157c973cc2bef1c82c1cbfa3236a2d67a972f510fe81c92a48d966f8a"} Dec 08 21:44:57 crc kubenswrapper[4912]: I1208 21:44:57.840984 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" 
event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerStarted","Data":"5b5d2aefb793e33af53408b6effe29c03e37c00311e7d368a0382abc31079962"} Dec 08 21:44:57 crc kubenswrapper[4912]: I1208 21:44:57.872613 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rmqkp" podStartSLOduration=2.402390924 podStartE2EDuration="8.872592454s" podCreationTimestamp="2025-12-08 21:44:49 +0000 UTC" firstStartedPulling="2025-12-08 21:44:50.768321526 +0000 UTC m=+1572.631323619" lastFinishedPulling="2025-12-08 21:44:57.238523066 +0000 UTC m=+1579.101525149" observedRunningTime="2025-12-08 21:44:57.85644182 +0000 UTC m=+1579.719443913" watchObservedRunningTime="2025-12-08 21:44:57.872592454 +0000 UTC m=+1579.735594527" Dec 08 21:44:59 crc kubenswrapper[4912]: I1208 21:44:59.567789 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:59 crc kubenswrapper[4912]: I1208 21:44:59.568137 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:44:59 crc kubenswrapper[4912]: I1208 21:44:59.630903 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.145850 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj"] Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.147741 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.150797 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.151195 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.159637 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj"] Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.310583 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.310627 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58n9c\" (UniqueName: \"kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.311074 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume\") pod 
\"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.412761 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.412832 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58n9c\" (UniqueName: \"kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.412987 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.413945 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.425274 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.431605 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58n9c\" (UniqueName: \"kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c\") pod \"collect-profiles-29420505-kwhsj\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:00 crc kubenswrapper[4912]: I1208 21:45:00.479883 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:01 crc kubenswrapper[4912]: I1208 21:45:01.099814 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj"] Dec 08 21:45:01 crc kubenswrapper[4912]: I1208 21:45:01.882373 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" event={"ID":"0ca05c0b-ff07-4d80-a8ce-561849c1ae20","Type":"ContainerStarted","Data":"6990ce7c4ec0c8f6043f9b567c5579b90c9ade7dffad021c5bd5792e4e110fa4"} Dec 08 21:45:02 crc kubenswrapper[4912]: I1208 21:45:02.899748 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" event={"ID":"0ca05c0b-ff07-4d80-a8ce-561849c1ae20","Type":"ContainerStarted","Data":"ffd2053de346221d39e4344eb6fd54f55c79fcde5daefb63d56d33f15e1e255f"} Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.024802 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.027496 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.036616 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.088373 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.088524 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2wdp\" (UniqueName: \"kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.088739 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.190280 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.190340 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2wdp\" (UniqueName: \"kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 
08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.190456 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.191127 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.191153 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.214738 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2wdp\" (UniqueName: \"kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp\") pod \"redhat-marketplace-m6525\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.351915 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.887059 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.912756 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerStarted","Data":"8e7babafb4f4798406a6a49c84c0199c8fb169c4ebaa1533d4fe7423cf489485"} Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.914901 4912 generic.go:334] "Generic (PLEG): container finished" podID="0ca05c0b-ff07-4d80-a8ce-561849c1ae20" containerID="ffd2053de346221d39e4344eb6fd54f55c79fcde5daefb63d56d33f15e1e255f" exitCode=0 Dec 08 21:45:03 crc kubenswrapper[4912]: I1208 21:45:03.914955 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" event={"ID":"0ca05c0b-ff07-4d80-a8ce-561849c1ae20","Type":"ContainerDied","Data":"ffd2053de346221d39e4344eb6fd54f55c79fcde5daefb63d56d33f15e1e255f"} Dec 08 21:45:04 crc kubenswrapper[4912]: I1208 21:45:04.926762 4912 generic.go:334] "Generic (PLEG): container finished" podID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerID="4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e" exitCode=0 Dec 08 21:45:04 crc kubenswrapper[4912]: I1208 21:45:04.926824 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerDied","Data":"4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e"} Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.348394 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.488565 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume\") pod \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.488831 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume\") pod \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.488858 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58n9c\" (UniqueName: \"kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c\") pod \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\" (UID: \"0ca05c0b-ff07-4d80-a8ce-561849c1ae20\") " Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.490219 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume" (OuterVolumeSpecName: "config-volume") pod "0ca05c0b-ff07-4d80-a8ce-561849c1ae20" (UID: "0ca05c0b-ff07-4d80-a8ce-561849c1ae20"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.491803 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.494252 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c" (OuterVolumeSpecName: "kube-api-access-58n9c") pod "0ca05c0b-ff07-4d80-a8ce-561849c1ae20" (UID: "0ca05c0b-ff07-4d80-a8ce-561849c1ae20"). InnerVolumeSpecName "kube-api-access-58n9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.495294 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0ca05c0b-ff07-4d80-a8ce-561849c1ae20" (UID: "0ca05c0b-ff07-4d80-a8ce-561849c1ae20"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.594049 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58n9c\" (UniqueName: \"kubernetes.io/projected/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-kube-api-access-58n9c\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.594337 4912 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ca05c0b-ff07-4d80-a8ce-561849c1ae20-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.940072 4912 generic.go:334] "Generic (PLEG): container finished" podID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerID="7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67" exitCode=0 Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.940148 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerDied","Data":"7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67"} Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.945977 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" event={"ID":"0ca05c0b-ff07-4d80-a8ce-561849c1ae20","Type":"ContainerDied","Data":"6990ce7c4ec0c8f6043f9b567c5579b90c9ade7dffad021c5bd5792e4e110fa4"} Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.946018 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6990ce7c4ec0c8f6043f9b567c5579b90c9ade7dffad021c5bd5792e4e110fa4" Dec 08 21:45:05 crc kubenswrapper[4912]: I1208 21:45:05.946070 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420505-kwhsj" Dec 08 21:45:07 crc kubenswrapper[4912]: I1208 21:45:07.019159 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerStarted","Data":"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599"} Dec 08 21:45:07 crc kubenswrapper[4912]: I1208 21:45:07.046611 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m6525" podStartSLOduration=3.602562444 podStartE2EDuration="5.046573212s" podCreationTimestamp="2025-12-08 21:45:02 +0000 UTC" firstStartedPulling="2025-12-08 21:45:04.928875421 +0000 UTC m=+1586.791877504" lastFinishedPulling="2025-12-08 21:45:06.372886189 +0000 UTC m=+1588.235888272" observedRunningTime="2025-12-08 21:45:07.042643521 +0000 UTC m=+1588.905645604" watchObservedRunningTime="2025-12-08 21:45:07.046573212 +0000 UTC m=+1588.909575295" Dec 08 21:45:09 crc kubenswrapper[4912]: I1208 21:45:09.614643 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 21:45:09 crc kubenswrapper[4912]: I1208 21:45:09.682044 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 21:45:09 crc kubenswrapper[4912]: I1208 21:45:09.729372 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.063456 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gjqv9" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="registry-server" containerID="cri-o://a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46" gracePeriod=2 Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.545870 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.721756 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities\") pod \"b9834161-a62c-4258-963d-3216a0f2d185\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.721854 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content\") pod \"b9834161-a62c-4258-963d-3216a0f2d185\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.721945 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxn8j\" (UniqueName: \"kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j\") pod \"b9834161-a62c-4258-963d-3216a0f2d185\" (UID: \"b9834161-a62c-4258-963d-3216a0f2d185\") " Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.722682 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities" (OuterVolumeSpecName: "utilities") pod "b9834161-a62c-4258-963d-3216a0f2d185" (UID: "b9834161-a62c-4258-963d-3216a0f2d185"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.723083 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.734580 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j" (OuterVolumeSpecName: "kube-api-access-lxn8j") pod "b9834161-a62c-4258-963d-3216a0f2d185" (UID: "b9834161-a62c-4258-963d-3216a0f2d185"). InnerVolumeSpecName "kube-api-access-lxn8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.777251 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9834161-a62c-4258-963d-3216a0f2d185" (UID: "b9834161-a62c-4258-963d-3216a0f2d185"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.825194 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9834161-a62c-4258-963d-3216a0f2d185-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:10 crc kubenswrapper[4912]: I1208 21:45:10.825462 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxn8j\" (UniqueName: \"kubernetes.io/projected/b9834161-a62c-4258-963d-3216a0f2d185-kube-api-access-lxn8j\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.074331 4912 generic.go:334] "Generic (PLEG): container finished" podID="b9834161-a62c-4258-963d-3216a0f2d185" containerID="a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46" exitCode=0 Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.074382 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerDied","Data":"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46"} Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.074409 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gjqv9" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.074429 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gjqv9" event={"ID":"b9834161-a62c-4258-963d-3216a0f2d185","Type":"ContainerDied","Data":"abec0d2eb2f021d0f5c3ca035904c5978a69faea6c56ac26106f461b91d43fd6"} Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.074458 4912 scope.go:117] "RemoveContainer" containerID="a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.101719 4912 scope.go:117] "RemoveContainer" containerID="fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.116291 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.121897 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gjqv9"] Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.141427 4912 scope.go:117] "RemoveContainer" containerID="734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.182604 4912 scope.go:117] "RemoveContainer" containerID="a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46" Dec 08 21:45:11 crc kubenswrapper[4912]: E1208 21:45:11.183910 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46\": container with ID starting with a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46 not found: ID does not exist" containerID="a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.183946 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46"} err="failed to get container status 
\"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46\": rpc error: code = NotFound desc = could not find container \"a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46\": container with ID starting with a266437ea2ed157412942458401ae58bc9aaeb39ffaf1ead9bda35e52a636c46 not found: ID does not exist" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.183985 4912 scope.go:117] "RemoveContainer" containerID="fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c" Dec 08 21:45:11 crc kubenswrapper[4912]: E1208 21:45:11.184364 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c\": container with ID starting with fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c not found: ID does not exist" containerID="fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.184386 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c"} err="failed to get container status \"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c\": rpc error: code = NotFound desc = could not find container \"fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c\": container with ID starting with fa2879a37a04a51fa3a6be0a4a8decd0be4b2510b99f49993297905e7c58b92c not found: ID does not exist" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.184400 4912 scope.go:117] "RemoveContainer" containerID="734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5" Dec 08 21:45:11 crc kubenswrapper[4912]: E1208 21:45:11.184601 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5\": container with ID starting with 734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5 not found: ID does not exist" containerID="734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5" Dec 08 21:45:11 crc kubenswrapper[4912]: I1208 21:45:11.184637 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5"} err="failed to get container status \"734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5\": rpc error: code = NotFound desc = could not find container \"734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5\": container with ID starting with 734ff729ff44b57a96bbf69099cb31df26602ebb98c0b6737f8e1a7ba4e349c5 not found: ID does not exist" Dec 08 21:45:12 crc kubenswrapper[4912]: I1208 21:45:12.439637 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9834161-a62c-4258-963d-3216a0f2d185" path="/var/lib/kubelet/pods/b9834161-a62c-4258-963d-3216a0f2d185/volumes" Dec 08 21:45:13 crc kubenswrapper[4912]: I1208 21:45:13.352386 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:13 crc kubenswrapper[4912]: I1208 21:45:13.352652 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:13 crc kubenswrapper[4912]: I1208 21:45:13.410913 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:14 crc kubenswrapper[4912]: I1208 21:45:14.238661 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:14 crc kubenswrapper[4912]: I1208 21:45:14.600007 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.209671 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m6525" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="registry-server" containerID="cri-o://fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599" gracePeriod=2 Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.773190 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.941715 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content\") pod \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.941898 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities\") pod \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.942095 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2wdp\" (UniqueName: \"kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp\") pod \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\" (UID: \"5e839d30-e06d-4c1e-bfd5-d2f64267c538\") " Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.943149 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities" (OuterVolumeSpecName: "utilities") pod "5e839d30-e06d-4c1e-bfd5-d2f64267c538" (UID: "5e839d30-e06d-4c1e-bfd5-d2f64267c538"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.954357 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp" (OuterVolumeSpecName: "kube-api-access-z2wdp") pod "5e839d30-e06d-4c1e-bfd5-d2f64267c538" (UID: "5e839d30-e06d-4c1e-bfd5-d2f64267c538"). InnerVolumeSpecName "kube-api-access-z2wdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:45:16 crc kubenswrapper[4912]: I1208 21:45:16.967570 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5e839d30-e06d-4c1e-bfd5-d2f64267c538" (UID: "5e839d30-e06d-4c1e-bfd5-d2f64267c538"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.045215 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.045550 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e839d30-e06d-4c1e-bfd5-d2f64267c538-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.045562 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2wdp\" (UniqueName: \"kubernetes.io/projected/5e839d30-e06d-4c1e-bfd5-d2f64267c538-kube-api-access-z2wdp\") on node \"crc\" DevicePath \"\"" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.220545 4912 generic.go:334] "Generic (PLEG): container finished" podID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerID="fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599" exitCode=0 Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.220604 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6525" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.220608 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerDied","Data":"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599"} Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.220813 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6525" event={"ID":"5e839d30-e06d-4c1e-bfd5-d2f64267c538","Type":"ContainerDied","Data":"8e7babafb4f4798406a6a49c84c0199c8fb169c4ebaa1533d4fe7423cf489485"} Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.220848 4912 scope.go:117] "RemoveContainer" containerID="fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.248023 4912 scope.go:117] "RemoveContainer" containerID="7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.266546 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.280334 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6525"] Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.289735 4912 scope.go:117] "RemoveContainer" containerID="4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.313220 4912 scope.go:117] "RemoveContainer" containerID="fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599" Dec 08 21:45:17 crc kubenswrapper[4912]: E1208 21:45:17.313824 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599\": container with ID starting with fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599 not found: ID does not exist" containerID="fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.313875 4912 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599"} err="failed to get container status \"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599\": rpc error: code = NotFound desc = could not find container \"fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599\": container with ID starting with fffdc44182dfaef17a7fceb639d902f81dd70679d4fbc7eb0cdb9c2d9458f599 not found: ID does not exist" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.313903 4912 scope.go:117] "RemoveContainer" containerID="7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67" Dec 08 21:45:17 crc kubenswrapper[4912]: E1208 21:45:17.314336 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67\": container with ID starting with 7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67 not found: ID does not exist" containerID="7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.314365 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67"} err="failed to get container status \"7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67\": rpc error: code = NotFound desc = could not find container \"7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67\": container with ID starting with 7662cf8f928f2afd045bc8aa59ec6bdd631ead7c1fe876ca414da6cc73c9ec67 not found: ID does not exist" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.314390 4912 scope.go:117] "RemoveContainer" containerID="4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e" Dec 08 21:45:17 crc kubenswrapper[4912]: E1208 21:45:17.314905 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e\": container with ID starting with 4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e not found: ID does not exist" containerID="4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e" Dec 08 21:45:17 crc kubenswrapper[4912]: I1208 21:45:17.314935 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e"} err="failed to get container status \"4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e\": rpc error: code = NotFound desc = could not find container \"4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e\": container with ID starting with 4f4cbb3f8319f970b7dc81ab15bcd83836e2ca593e8f75a4c7ae847aaddd003e not found: ID does not exist" Dec 08 21:45:18 crc kubenswrapper[4912]: I1208 21:45:18.438771 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" path="/var/lib/kubelet/pods/5e839d30-e06d-4c1e-bfd5-d2f64267c538/volumes" Dec 08 21:45:32 crc kubenswrapper[4912]: I1208 21:45:32.965242 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:45:32 crc kubenswrapper[4912]: I1208 21:45:32.965764 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:45:37 crc kubenswrapper[4912]: I1208 21:45:37.492501 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76" exitCode=1 Dec 08 21:45:37 crc kubenswrapper[4912]: I1208 21:45:37.492565 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76"} Dec 08 21:45:37 crc kubenswrapper[4912]: I1208 21:45:37.494300 4912 scope.go:117] "RemoveContainer" containerID="38f41ffe6e495e48d03b9058b53b45cbc362a8af403a678ee846d41d145aba9f" Dec 08 21:45:37 crc kubenswrapper[4912]: I1208 21:45:37.494919 4912 scope.go:117] "RemoveContainer" containerID="1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76" Dec 08 21:45:37 crc kubenswrapper[4912]: E1208 21:45:37.495186 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:45:45 crc kubenswrapper[4912]: I1208 21:45:45.249436 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:45:45 crc kubenswrapper[4912]: I1208 21:45:45.250009 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:45:45 crc kubenswrapper[4912]: I1208 21:45:45.250841 4912 scope.go:117] "RemoveContainer" containerID="1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76" Dec 08 21:45:45 crc kubenswrapper[4912]: E1208 21:45:45.251389 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:45:58 crc kubenswrapper[4912]: I1208 21:45:58.435574 4912 scope.go:117] "RemoveContainer" containerID="1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76" Dec 08 21:45:58 crc kubenswrapper[4912]: I1208 21:45:58.713884 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"} Dec 08 21:45:58 
crc kubenswrapper[4912]: I1208 21:45:58.714321 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:46:02 crc kubenswrapper[4912]: I1208 21:46:02.964859 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:46:02 crc kubenswrapper[4912]: I1208 21:46:02.965453 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:46:05 crc kubenswrapper[4912]: I1208 21:46:05.252262 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:46:32 crc kubenswrapper[4912]: I1208 21:46:32.965197 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:46:32 crc kubenswrapper[4912]: I1208 21:46:32.965992 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:46:32 crc kubenswrapper[4912]: I1208 21:46:32.966067 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:46:32 crc kubenswrapper[4912]: I1208 21:46:32.966974 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:46:32 crc kubenswrapper[4912]: I1208 21:46:32.967112 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" gracePeriod=600 Dec 08 21:46:33 crc kubenswrapper[4912]: E1208 21:46:33.102491 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:46:34 crc kubenswrapper[4912]: I1208 21:46:34.067889 4912 generic.go:334] "Generic (PLEG): container 
finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" exitCode=0 Dec 08 21:46:34 crc kubenswrapper[4912]: I1208 21:46:34.067928 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"} Dec 08 21:46:34 crc kubenswrapper[4912]: I1208 21:46:34.068618 4912 scope.go:117] "RemoveContainer" containerID="bca8a47721443f47b11226277d892ecf92290a5aadbe5d8268e53db7e2821cf1" Dec 08 21:46:34 crc kubenswrapper[4912]: I1208 21:46:34.069566 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:46:34 crc kubenswrapper[4912]: E1208 21:46:34.070105 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:46:42 crc kubenswrapper[4912]: I1208 21:46:42.943258 4912 scope.go:117] "RemoveContainer" containerID="cd85b21d8c49c0566a10c3fef4eddd510c5400559d97b73fbc4a60eea732a792" Dec 08 21:46:45 crc kubenswrapper[4912]: I1208 21:46:45.428309 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:46:45 crc kubenswrapper[4912]: E1208 21:46:45.428838 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:46:57 crc kubenswrapper[4912]: I1208 21:46:57.427521 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:46:57 crc kubenswrapper[4912]: E1208 21:46:57.432285 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:47:09 crc kubenswrapper[4912]: I1208 21:47:09.428214 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:47:09 crc kubenswrapper[4912]: E1208 21:47:09.428889 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:47:21 crc 
kubenswrapper[4912]: I1208 21:47:21.428089 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:47:21 crc kubenswrapper[4912]: E1208 21:47:21.428823 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:47:32 crc kubenswrapper[4912]: I1208 21:47:32.427681 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:47:32 crc kubenswrapper[4912]: E1208 21:47:32.428515 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:47:43 crc kubenswrapper[4912]: I1208 21:47:43.009770 4912 scope.go:117] "RemoveContainer" containerID="306d4c646b69b4e0dc3a92c5f59d022bbc7639f39f2cbc9b0c37ba191d22cd7b" Dec 08 21:47:46 crc kubenswrapper[4912]: I1208 21:47:46.428407 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:47:46 crc kubenswrapper[4912]: E1208 21:47:46.429165 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:47:59 crc kubenswrapper[4912]: I1208 21:47:59.428399 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:47:59 crc kubenswrapper[4912]: E1208 21:47:59.430439 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:48:10 crc kubenswrapper[4912]: I1208 21:48:10.428940 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:48:10 crc kubenswrapper[4912]: E1208 21:48:10.429783 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:48:21 crc 
Dec 08 21:48:21 crc kubenswrapper[4912]: I1208 21:48:21.428096 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:48:21 crc kubenswrapper[4912]: E1208 21:48:21.428964 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:48:30 crc kubenswrapper[4912]: I1208 21:48:30.270176 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e" exitCode=1
Dec 08 21:48:30 crc kubenswrapper[4912]: I1208 21:48:30.270305 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"}
Dec 08 21:48:30 crc kubenswrapper[4912]: I1208 21:48:30.270741 4912 scope.go:117] "RemoveContainer" containerID="1fe8bbfe0c32ba5bb1fc2e9082cf233424c4b2dfeb84837ff6f189650ee0eb76"
Dec 08 21:48:30 crc kubenswrapper[4912]: I1208 21:48:30.271550 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"
Dec 08 21:48:30 crc kubenswrapper[4912]: E1208 21:48:30.271810 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:48:35 crc kubenswrapper[4912]: I1208 21:48:35.248932 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 21:48:35 crc kubenswrapper[4912]: I1208 21:48:35.250293 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"
Dec 08 21:48:35 crc kubenswrapper[4912]: E1208 21:48:35.250616 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:48:36 crc kubenswrapper[4912]: I1208 21:48:36.428716 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:48:36 crc kubenswrapper[4912]: E1208 21:48:36.429182 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:48:43 crc kubenswrapper[4912]: I1208 21:48:43.087360 4912 scope.go:117] "RemoveContainer" containerID="6a9fdebb1400c7cf6d19d5055653f7e85b784bc786c9bc851e7c99d2f0ea6982"
Dec 08 21:48:43 crc kubenswrapper[4912]: I1208 21:48:43.107701 4912 scope.go:117] "RemoveContainer" containerID="da04bd4638aa60403c92aa20a8699e81e729579a161d46ebb71701604cfe0d40"
Dec 08 21:48:45 crc kubenswrapper[4912]: I1208 21:48:45.249495 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 21:48:45 crc kubenswrapper[4912]: I1208 21:48:45.250352 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"
Dec 08 21:48:45 crc kubenswrapper[4912]: E1208 21:48:45.250688 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:48:51 crc kubenswrapper[4912]: I1208 21:48:51.427793 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:48:51 crc kubenswrapper[4912]: E1208 21:48:51.428542 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:48:58 crc kubenswrapper[4912]: I1208 21:48:58.436429 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"
Dec 08 21:48:58 crc kubenswrapper[4912]: E1208 21:48:58.437253 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:49:04 crc kubenswrapper[4912]: I1208 21:49:04.428469 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:49:04 crc kubenswrapper[4912]: E1208 21:49:04.429386 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:49:13 crc kubenswrapper[4912]: I1208 21:49:13.429082 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e"
Dec 08 21:49:13 crc kubenswrapper[4912]: I1208 21:49:13.675514 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1"}
Dec 08 21:49:13 crc kubenswrapper[4912]: I1208 21:49:13.676126 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 21:49:17 crc kubenswrapper[4912]: I1208 21:49:17.427535 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:49:17 crc kubenswrapper[4912]: E1208 21:49:17.428337 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:49:25 crc kubenswrapper[4912]: I1208 21:49:25.251841 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 21:49:29 crc kubenswrapper[4912]: I1208 21:49:29.427708 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:49:29 crc kubenswrapper[4912]: E1208 21:49:29.428413 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:49:40 crc kubenswrapper[4912]: I1208 21:49:40.428763 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:49:40 crc kubenswrapper[4912]: E1208 21:49:40.429676 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:49:43 crc kubenswrapper[4912]: I1208 21:49:43.171251 4912 scope.go:117] "RemoveContainer" containerID="8e8efc064eafb595f34f248ad02afefbcbcc421b82285a99ecc83a1e5a96b1b3"
Dec 08 21:49:43 crc kubenswrapper[4912]: I1208 21:49:43.227551 4912 scope.go:117] "RemoveContainer" containerID="5a62f5df35a1b5b6eaa34d535855002be112ec2b8b718bf7aeee84191d5750ef"
Dec 08 21:49:53 crc kubenswrapper[4912]: I1208 21:49:53.427769 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:49:53 crc kubenswrapper[4912]: E1208 21:49:53.428873 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:50:07 crc kubenswrapper[4912]: I1208 21:50:07.427961 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:50:07 crc kubenswrapper[4912]: E1208 21:50:07.429831 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:50:15 crc kubenswrapper[4912]: I1208 21:50:15.052774 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-mwbl8"]
Dec 08 21:50:15 crc kubenswrapper[4912]: I1208 21:50:15.066434 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-mwbl8"]
Dec 08 21:50:16 crc kubenswrapper[4912]: I1208 21:50:16.048236 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b6dd-account-create-update-wvsx7"]
Dec 08 21:50:16 crc kubenswrapper[4912]: I1208 21:50:16.055720 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b6dd-account-create-update-wvsx7"]
Dec 08 21:50:16 crc kubenswrapper[4912]: I1208 21:50:16.893131 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31b6f852-ce10-4c0d-8a14-897b647ae732" path="/var/lib/kubelet/pods/31b6f852-ce10-4c0d-8a14-897b647ae732/volumes"
Dec 08 21:50:16 crc kubenswrapper[4912]: I1208 21:50:16.894363 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e452832b-2c91-42f8-be68-d0a6b1ec4b1a" path="/var/lib/kubelet/pods/e452832b-2c91-42f8-be68-d0a6b1ec4b1a/volumes"
Dec 08 21:50:19 crc kubenswrapper[4912]: I1208 21:50:19.428393 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:50:19 crc kubenswrapper[4912]: E1208 21:50:19.429008 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:50:21 crc kubenswrapper[4912]: I1208 21:50:21.029077 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-2kbmb"]
Dec 08 21:50:21 crc kubenswrapper[4912]: I1208 21:50:21.038767 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-83db-account-create-update-bw9rr"]
Dec 08 21:50:21 crc kubenswrapper[4912]: I1208 21:50:21.047306 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-83db-account-create-update-bw9rr"]
Dec 08 21:50:21 crc kubenswrapper[4912]: I1208 21:50:21.055838 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-2kbmb"]
Dec 08 21:50:22 crc kubenswrapper[4912]: I1208 21:50:22.441694 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69df27a2-fe75-49cf-88ef-4fba6c4884f9" path="/var/lib/kubelet/pods/69df27a2-fe75-49cf-88ef-4fba6c4884f9/volumes"
Dec 08 21:50:22 crc kubenswrapper[4912]: I1208 21:50:22.442667 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b00be803-22da-4dfd-b211-cd9b1a44bf80" path="/var/lib/kubelet/pods/b00be803-22da-4dfd-b211-cd9b1a44bf80/volumes"
Dec 08 21:50:29 crc kubenswrapper[4912]: I1208 21:50:29.147617 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-42dd-account-create-update-s4hkv"]
Dec 08 21:50:29 crc kubenswrapper[4912]: I1208 21:50:29.156872 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-tf9gs"]
Dec 08 21:50:29 crc kubenswrapper[4912]: I1208 21:50:29.164841 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-tf9gs"]
Dec 08 21:50:29 crc kubenswrapper[4912]: I1208 21:50:29.172194 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-42dd-account-create-update-s4hkv"]
Dec 08 21:50:30 crc kubenswrapper[4912]: I1208 21:50:30.439798 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd8c54a-66bf-458c-9838-83214f8fafaa" path="/var/lib/kubelet/pods/3dd8c54a-66bf-458c-9838-83214f8fafaa/volumes"
Dec 08 21:50:30 crc kubenswrapper[4912]: I1208 21:50:30.440740 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de10e2f6-f34e-4da7-bab3-a302a691ca52" path="/var/lib/kubelet/pods/de10e2f6-f34e-4da7-bab3-a302a691ca52/volumes"
Dec 08 21:50:33 crc kubenswrapper[4912]: I1208 21:50:33.428361 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:50:33 crc kubenswrapper[4912]: E1208 21:50:33.429068 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.279322 4912 scope.go:117] "RemoveContainer" containerID="4669b5ab31cf110fa39e27bab09ab377d03338d59c9311b0d37573fe4eeb2a58"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.311288 4912 scope.go:117] "RemoveContainer" containerID="d959fc509d3b200a40f26432ec7b5ffd6950cbbd7219867602e2d2b07cb21709"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.368850 4912 scope.go:117] "RemoveContainer" containerID="754ac7e3f537f3a47b9805abb1966208987b1e1aa731333ffd8ee1b4f14ce25f"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.407527 4912 scope.go:117] "RemoveContainer" containerID="8545ec1d97a924e6f106e3d01963c606dae5702448a6ab3b10f2a39f2182d613"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.455821 4912 scope.go:117] "RemoveContainer" containerID="9195ce33b557d0d18d9a557c17087e5a4028c7218a546c9ff1a10df54a5187e0"
Dec 08 21:50:43 crc kubenswrapper[4912]: I1208 21:50:43.485991 4912 scope.go:117] "RemoveContainer" containerID="5ef1dc44f0f94d909236d69110da9e533880f4dec69964281cb50574b356560d"
Dec 08 21:50:47 crc kubenswrapper[4912]: I1208 21:50:47.428672 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:50:47 crc kubenswrapper[4912]: E1208 21:50:47.430080 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.041451 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-wlxkk"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.056125 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mr8rp"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.069280 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-vd8lj"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.080662 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-fbd8-account-create-update-8cvpw"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.090019 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-957f-account-create-update-9hlk9"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.098204 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-fbd8-account-create-update-8cvpw"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.106232 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-957f-account-create-update-9hlk9"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.113565 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-wlxkk"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.121015 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mr8rp"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.128493 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-vd8lj"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.136496 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-ed29-account-create-update-bgmtf"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.146106 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-ed29-account-create-update-bgmtf"]
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.437758 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="545dd2e8-5f39-4673-b406-f42be1033a46" path="/var/lib/kubelet/pods/545dd2e8-5f39-4673-b406-f42be1033a46/volumes"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.438377 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74214961-90de-45d4-8b70-a53db54e6a8c" path="/var/lib/kubelet/pods/74214961-90de-45d4-8b70-a53db54e6a8c/volumes"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.438894 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85aef8d3-55b7-44b3-81db-c84293e8c5fd" path="/var/lib/kubelet/pods/85aef8d3-55b7-44b3-81db-c84293e8c5fd/volumes"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.439464 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b52032bd-e87f-4e3f-9502-847a57d802e4" path="/var/lib/kubelet/pods/b52032bd-e87f-4e3f-9502-847a57d802e4/volumes"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.440476 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9227e7c-9a8f-4c43-a44b-29b113a7a8cf" path="/var/lib/kubelet/pods/e9227e7c-9a8f-4c43-a44b-29b113a7a8cf/volumes"
Dec 08 21:50:48 crc kubenswrapper[4912]: I1208 21:50:48.441088 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f39e3e23-7dbe-49d6-9159-258cb947b761" path="/var/lib/kubelet/pods/f39e3e23-7dbe-49d6-9159-258cb947b761/volumes"
Dec 08 21:50:54 crc kubenswrapper[4912]: I1208 21:50:54.030321 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-vs7fn"]
Dec 08 21:50:54 crc kubenswrapper[4912]: I1208 21:50:54.039656 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-vs7fn"]
Dec 08 21:50:54 crc kubenswrapper[4912]: I1208 21:50:54.440949 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22da54af-1006-403d-b33a-ae71353ee4e6" path="/var/lib/kubelet/pods/22da54af-1006-403d-b33a-ae71353ee4e6/volumes"
Dec 08 21:50:55 crc kubenswrapper[4912]: I1208 21:50:55.027075 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-nftjn"]
Dec 08 21:50:55 crc kubenswrapper[4912]: I1208 21:50:55.037221 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-nftjn"]
Dec 08 21:50:56 crc kubenswrapper[4912]: I1208 21:50:56.438844 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d4fe027-c837-4bb3-b658-30c00d41ce24" path="/var/lib/kubelet/pods/3d4fe027-c837-4bb3-b658-30c00d41ce24/volumes"
Dec 08 21:51:00 crc kubenswrapper[4912]: I1208 21:51:00.428550 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:51:00 crc kubenswrapper[4912]: E1208 21:51:00.429176 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:51:12 crc kubenswrapper[4912]: I1208 21:51:12.428925 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:51:12 crc kubenswrapper[4912]: E1208 21:51:12.429616 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.805165 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fslnr"]
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806189 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="extract-content"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806223 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="extract-content"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806249 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="extract-utilities"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806258 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="extract-utilities"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806284 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca05c0b-ff07-4d80-a8ce-561849c1ae20" containerName="collect-profiles"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806293 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca05c0b-ff07-4d80-a8ce-561849c1ae20" containerName="collect-profiles"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806308 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806315 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806329 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="extract-content"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806337 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="extract-content"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806350 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806355 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: E1208 21:51:14.806374 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="extract-utilities"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806380 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="extract-utilities"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806627 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca05c0b-ff07-4d80-a8ce-561849c1ae20" containerName="collect-profiles"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806654 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e839d30-e06d-4c1e-bfd5-d2f64267c538" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.806667 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9834161-a62c-4258-963d-3216a0f2d185" containerName="registry-server"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.808532 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.817582 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fslnr"]
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.842137 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.842256 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.842340 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr4tj\" (UniqueName: \"kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.944595 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.944736 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr4tj\" (UniqueName: \"kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.944840 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.945219 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.945569 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:14 crc kubenswrapper[4912]: I1208 21:51:14.965780 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr4tj\" (UniqueName: \"kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj\") pod \"community-operators-fslnr\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:15 crc kubenswrapper[4912]: I1208 21:51:15.150573 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fslnr"
Dec 08 21:51:15 crc kubenswrapper[4912]: I1208 21:51:15.759675 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fslnr"]
Dec 08 21:51:16 crc kubenswrapper[4912]: I1208 21:51:16.440884 4912 generic.go:334] "Generic (PLEG): container finished" podID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerID="6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5" exitCode=0
Dec 08 21:51:16 crc kubenswrapper[4912]: I1208 21:51:16.442964 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 08 21:51:16 crc kubenswrapper[4912]: I1208 21:51:16.445571 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerDied","Data":"6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5"}
Dec 08 21:51:16 crc kubenswrapper[4912]: I1208 21:51:16.445722 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerStarted","Data":"66cb4ebad4bf2efeb80b3581a606246bd9c0abb54fdfa25cd80e489b2d0d9140"}
Dec 08 21:51:18 crc kubenswrapper[4912]: I1208 21:51:18.462141 4912 generic.go:334] "Generic (PLEG): container finished" podID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerID="710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af" exitCode=0
Dec 08 21:51:18 crc kubenswrapper[4912]: I1208 21:51:18.462245 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerDied","Data":"710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af"}
Dec 08 21:51:19 crc kubenswrapper[4912]: I1208 21:51:19.474498 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerStarted","Data":"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206"}
Dec 08 21:51:19 crc kubenswrapper[4912]: I1208 21:51:19.496336 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fslnr" podStartSLOduration=3.033377627 podStartE2EDuration="5.496316554s" podCreationTimestamp="2025-12-08 21:51:14 +0000 UTC" firstStartedPulling="2025-12-08 21:51:16.442694486 +0000 UTC m=+1958.305696569" lastFinishedPulling="2025-12-08 21:51:18.905633413 +0000 UTC m=+1960.768635496" observedRunningTime="2025-12-08 21:51:19.489249302 +0000 UTC m=+1961.352251385" watchObservedRunningTime="2025-12-08 21:51:19.496316554 +0000 UTC m=+1961.359318637"
Dec 08 21:51:24 crc kubenswrapper[4912]: I1208 21:51:24.044899 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-7hldc"]
pods=["openstack/placement-db-sync-7hldc"] Dec 08 21:51:24 crc kubenswrapper[4912]: I1208 21:51:24.439028 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0aefa1f-6b89-4aa1-b25e-3f3275ed571a" path="/var/lib/kubelet/pods/c0aefa1f-6b89-4aa1-b25e-3f3275ed571a/volumes" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.150768 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.150840 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.200830 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.429892 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:51:25 crc kubenswrapper[4912]: E1208 21:51:25.431166 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.574616 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:25 crc kubenswrapper[4912]: I1208 21:51:25.631592 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fslnr"] Dec 08 21:51:27 crc kubenswrapper[4912]: I1208 21:51:27.032990 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-krnh9"] Dec 08 21:51:27 crc kubenswrapper[4912]: I1208 21:51:27.042772 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dpz7w"] Dec 08 21:51:27 crc kubenswrapper[4912]: I1208 21:51:27.051360 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-krnh9"] Dec 08 21:51:27 crc kubenswrapper[4912]: I1208 21:51:27.061213 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dpz7w"] Dec 08 21:51:27 crc kubenswrapper[4912]: I1208 21:51:27.551278 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fslnr" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="registry-server" containerID="cri-o://1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206" gracePeriod=2 Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.128199 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.171405 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr4tj\" (UniqueName: \"kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj\") pod \"d18baec2-2dd0-476a-80f8-837cb09d6f99\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.171471 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content\") pod \"d18baec2-2dd0-476a-80f8-837cb09d6f99\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.171540 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities\") pod \"d18baec2-2dd0-476a-80f8-837cb09d6f99\" (UID: \"d18baec2-2dd0-476a-80f8-837cb09d6f99\") " Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.172754 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities" (OuterVolumeSpecName: "utilities") pod "d18baec2-2dd0-476a-80f8-837cb09d6f99" (UID: "d18baec2-2dd0-476a-80f8-837cb09d6f99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.178422 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj" (OuterVolumeSpecName: "kube-api-access-wr4tj") pod "d18baec2-2dd0-476a-80f8-837cb09d6f99" (UID: "d18baec2-2dd0-476a-80f8-837cb09d6f99"). InnerVolumeSpecName "kube-api-access-wr4tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.273423 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr4tj\" (UniqueName: \"kubernetes.io/projected/d18baec2-2dd0-476a-80f8-837cb09d6f99-kube-api-access-wr4tj\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.273768 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.277843 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d18baec2-2dd0-476a-80f8-837cb09d6f99" (UID: "d18baec2-2dd0-476a-80f8-837cb09d6f99"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.375441 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18baec2-2dd0-476a-80f8-837cb09d6f99-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.452811 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29694038-be1b-4d16-95ce-16c516b0f8bf" path="/var/lib/kubelet/pods/29694038-be1b-4d16-95ce-16c516b0f8bf/volumes" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.453754 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47140ad7-c2c2-4c61-97da-17341fccc09b" path="/var/lib/kubelet/pods/47140ad7-c2c2-4c61-97da-17341fccc09b/volumes" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.562554 4912 generic.go:334] "Generic (PLEG): container finished" podID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerID="1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206" exitCode=0 Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.562608 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerDied","Data":"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206"} Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.562655 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fslnr" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.562680 4912 scope.go:117] "RemoveContainer" containerID="1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.562663 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fslnr" event={"ID":"d18baec2-2dd0-476a-80f8-837cb09d6f99","Type":"ContainerDied","Data":"66cb4ebad4bf2efeb80b3581a606246bd9c0abb54fdfa25cd80e489b2d0d9140"} Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.585158 4912 scope.go:117] "RemoveContainer" containerID="710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.588756 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fslnr"] Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.596623 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fslnr"] Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.616955 4912 scope.go:117] "RemoveContainer" containerID="6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.649314 4912 scope.go:117] "RemoveContainer" containerID="1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206" Dec 08 21:51:28 crc kubenswrapper[4912]: E1208 21:51:28.649825 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206\": container with ID starting with 1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206 not found: ID does not exist" containerID="1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.649872 4912 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206"} err="failed to get container status \"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206\": rpc error: code = NotFound desc = could not find container \"1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206\": container with ID starting with 1501cf530f3a4b1e100b315b208af0e015e459f8c80c7a9c7040ad222c513206 not found: ID does not exist" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.649902 4912 scope.go:117] "RemoveContainer" containerID="710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af" Dec 08 21:51:28 crc kubenswrapper[4912]: E1208 21:51:28.650299 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af\": container with ID starting with 710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af not found: ID does not exist" containerID="710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.650333 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af"} err="failed to get container status \"710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af\": rpc error: code = NotFound desc = could not find container \"710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af\": container with ID starting with 710c83501d2f9c98a1c7eca790539b6f9293bcf7e87ac39fd95eabd2fd8803af not found: ID does not exist" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.650355 4912 scope.go:117] "RemoveContainer" containerID="6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5" Dec 08 21:51:28 crc kubenswrapper[4912]: E1208 21:51:28.650625 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5\": container with ID starting with 6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5 not found: ID does not exist" containerID="6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5" Dec 08 21:51:28 crc kubenswrapper[4912]: I1208 21:51:28.650644 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5"} err="failed to get container status \"6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5\": rpc error: code = NotFound desc = could not find container \"6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5\": container with ID starting with 6658f87720ef98267f99ece52204d7a196ab60549ce08c1e0b8529aa1192b7e5 not found: ID does not exist" Dec 08 21:51:30 crc kubenswrapper[4912]: I1208 21:51:30.437800 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" path="/var/lib/kubelet/pods/d18baec2-2dd0-476a-80f8-837cb09d6f99/volumes" Dec 08 21:51:36 crc kubenswrapper[4912]: I1208 21:51:36.428102 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3" Dec 08 21:51:37 crc kubenswrapper[4912]: I1208 21:51:37.660860 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651"} Dec 08 21:51:40 crc kubenswrapper[4912]: I1208 21:51:40.045533 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4xrcd"] Dec 08 21:51:40 crc kubenswrapper[4912]: I1208 21:51:40.053206 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4xrcd"] Dec 08 21:51:40 crc kubenswrapper[4912]: I1208 21:51:40.440118 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4574c96-aa92-4621-92e2-d8ee041d94c8" path="/var/lib/kubelet/pods/f4574c96-aa92-4621-92e2-d8ee041d94c8/volumes" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.630391 4912 scope.go:117] "RemoveContainer" containerID="1728362c583209638b70a9b644027db1090af689cf56281745c8a5be0ed4a21b" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.666237 4912 scope.go:117] "RemoveContainer" containerID="7464cc8c599b7d767baa3b5effc54dd136cf4fcec755ca4de4939e5556bd1665" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.704697 4912 scope.go:117] "RemoveContainer" containerID="bc9f7995cc9b298f6dda16ff398900d2f4ca33353c9e414f3a70a3c00e8f7811" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.745913 4912 scope.go:117] "RemoveContainer" containerID="e69a7d63d022206f98327b1e8ce0137065f501e6071416f527c30f5f1473d807" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.805725 4912 scope.go:117] "RemoveContainer" containerID="07f3265906a397d70be21b913f06ed222a4308b8eb3b4bf34931af3e2fdec612" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.844334 4912 scope.go:117] "RemoveContainer" containerID="d95722f90a5682996e53dd3a24fc47ac1e9f9f6a2ceb792e31bae4626d68fd2a" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.876152 4912 scope.go:117] "RemoveContainer" containerID="c6d6e46d6e5b92add6b665e1dc856c21546d651ba2bb3c43f6629b60b35371fb" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.913906 4912 scope.go:117] "RemoveContainer" containerID="62ea6c5140e63b84bfbe83ed0c1b329312ad81a300301341b186a345f421e954" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.933024 4912 scope.go:117] "RemoveContainer" containerID="12a1f277452a9245132795a91cc6458463dca71fc4d1dbf77f5fea7b513ca1b6" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.951722 4912 scope.go:117] "RemoveContainer" containerID="6129e6851894b9dce578cfad956a60d0eae3bea487a2fe3f1ea364442ff2ea21" Dec 08 21:51:43 crc kubenswrapper[4912]: I1208 21:51:43.994218 4912 scope.go:117] "RemoveContainer" containerID="cdaf6e741d17ca0448781b733bfa0eab46c78ca7a352c65340b22f2d14ddb87b" Dec 08 21:51:44 crc kubenswrapper[4912]: I1208 21:51:44.045719 4912 scope.go:117] "RemoveContainer" containerID="34432d647fab3a38b1d37480eb79f732e8f8464ca53533ee285cf5d5eedbbd60" Dec 08 21:51:44 crc kubenswrapper[4912]: I1208 21:51:44.052531 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-djggl"] Dec 08 21:51:44 crc kubenswrapper[4912]: I1208 21:51:44.060454 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-djggl"] Dec 08 21:51:44 crc kubenswrapper[4912]: I1208 21:51:44.440439 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06e69c2b-f54a-466a-9f5c-60499b4f5123" path="/var/lib/kubelet/pods/06e69c2b-f54a-466a-9f5c-60499b4f5123/volumes" Dec 08 21:51:50 crc kubenswrapper[4912]: I1208 
21:51:50.804934 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" exitCode=1 Dec 08 21:51:50 crc kubenswrapper[4912]: I1208 21:51:50.804970 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1"} Dec 08 21:51:50 crc kubenswrapper[4912]: I1208 21:51:50.805480 4912 scope.go:117] "RemoveContainer" containerID="f3614534ee37610285fe41dbfeb8f4f55f89098b0a01b90f17467794a08ad70e" Dec 08 21:51:50 crc kubenswrapper[4912]: I1208 21:51:50.806277 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:51:50 crc kubenswrapper[4912]: E1208 21:51:50.806584 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:51:55 crc kubenswrapper[4912]: I1208 21:51:55.248831 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:51:55 crc kubenswrapper[4912]: I1208 21:51:55.250019 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:51:55 crc kubenswrapper[4912]: E1208 21:51:55.250295 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:52:05 crc kubenswrapper[4912]: I1208 21:52:05.249389 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:52:05 crc kubenswrapper[4912]: I1208 21:52:05.250750 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:52:05 crc kubenswrapper[4912]: E1208 21:52:05.251076 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:52:20 crc kubenswrapper[4912]: I1208 21:52:20.428677 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:52:20 crc kubenswrapper[4912]: E1208 21:52:20.429474 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: 
\"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:52:26 crc kubenswrapper[4912]: I1208 21:52:26.037900 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-8xbcq"] Dec 08 21:52:26 crc kubenswrapper[4912]: I1208 21:52:26.047968 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-8xbcq"] Dec 08 21:52:26 crc kubenswrapper[4912]: I1208 21:52:26.440057 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d876181b-ee71-4961-a40b-4bf1f634bc59" path="/var/lib/kubelet/pods/d876181b-ee71-4961-a40b-4bf1f634bc59/volumes" Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.032401 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-29a0-account-create-update-8ftmm"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.052559 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-29a0-account-create-update-8ftmm"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.061842 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-dc7c-account-create-update-x4cjw"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.069316 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d03c-account-create-update-5xf7p"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.076686 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-72qk9"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.084240 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-dc7c-account-create-update-x4cjw"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.091634 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d03c-account-create-update-5xf7p"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.099102 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gtnp8"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.105300 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-72qk9"] Dec 08 21:52:27 crc kubenswrapper[4912]: I1208 21:52:27.111676 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gtnp8"] Dec 08 21:52:28 crc kubenswrapper[4912]: I1208 21:52:28.440112 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b451d56-df8d-43b1-bcc0-18aeba033358" path="/var/lib/kubelet/pods/5b451d56-df8d-43b1-bcc0-18aeba033358/volumes" Dec 08 21:52:28 crc kubenswrapper[4912]: I1208 21:52:28.441699 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="780887b8-a649-4d74-b050-43fc706b23cb" path="/var/lib/kubelet/pods/780887b8-a649-4d74-b050-43fc706b23cb/volumes" Dec 08 21:52:28 crc kubenswrapper[4912]: I1208 21:52:28.442745 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c46d9ff-9bd3-434b-9917-db43f38a2320" path="/var/lib/kubelet/pods/9c46d9ff-9bd3-434b-9917-db43f38a2320/volumes" Dec 08 21:52:28 crc kubenswrapper[4912]: I1208 21:52:28.443548 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2ed86f7-7769-4ccd-af95-9377719f1856" 
path="/var/lib/kubelet/pods/a2ed86f7-7769-4ccd-af95-9377719f1856/volumes" Dec 08 21:52:28 crc kubenswrapper[4912]: I1208 21:52:28.444965 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5416848-8b0d-4e46-bdba-8286d1ba9c2c" path="/var/lib/kubelet/pods/f5416848-8b0d-4e46-bdba-8286d1ba9c2c/volumes" Dec 08 21:52:34 crc kubenswrapper[4912]: I1208 21:52:34.427917 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:52:34 crc kubenswrapper[4912]: E1208 21:52:34.430023 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.287122 4912 scope.go:117] "RemoveContainer" containerID="7b8720abc03f91aae958786894395274ea4e82dd352a8aeb6ca46a8b262dcc98" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.316493 4912 scope.go:117] "RemoveContainer" containerID="f6f03e13b36854441d66c7286d3569ff3885f677d2d27d7280bdf141c1f4ba81" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.391656 4912 scope.go:117] "RemoveContainer" containerID="14571e613324d72cc87a6dabf81784b547cd9c94668a59c5c2b880babc610b8f" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.458409 4912 scope.go:117] "RemoveContainer" containerID="1202e5557ce1ffbef4095b2155cf352a4e2a9bf1f34d8772bf748c6f438f922c" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.496940 4912 scope.go:117] "RemoveContainer" containerID="df01e32bea4de2c9712b84e9ebb4506beea520d5b7e0576c690735e4b56765ae" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.552761 4912 scope.go:117] "RemoveContainer" containerID="a3f6bd14d174271a07233dcc9def8c8f178831cb67e476547552e2296bc20ceb" Dec 08 21:52:44 crc kubenswrapper[4912]: I1208 21:52:44.576131 4912 scope.go:117] "RemoveContainer" containerID="e1da749d342be1c8c839d40aa840357ec1ab99d7b6c02ace1d3441d8c709046d" Dec 08 21:52:49 crc kubenswrapper[4912]: I1208 21:52:49.428850 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:52:49 crc kubenswrapper[4912]: E1208 21:52:49.430003 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:53:02 crc kubenswrapper[4912]: I1208 21:53:02.427961 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 21:53:02 crc kubenswrapper[4912]: E1208 21:53:02.428673 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" 
podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:53:03 crc kubenswrapper[4912]: I1208 21:53:03.044986 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lkdlz"] Dec 08 21:53:03 crc kubenswrapper[4912]: I1208 21:53:03.055149 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lkdlz"] Dec 08 21:53:04 crc kubenswrapper[4912]: I1208 21:53:04.438605 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fab798f-4b32-43d8-bb00-cb1557b45f4f" path="/var/lib/kubelet/pods/8fab798f-4b32-43d8-bb00-cb1557b45f4f/volumes" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.333453 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:05 crc kubenswrapper[4912]: E1208 21:53:05.333866 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="registry-server" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.333885 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="registry-server" Dec 08 21:53:05 crc kubenswrapper[4912]: E1208 21:53:05.333910 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="extract-utilities" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.333918 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="extract-utilities" Dec 08 21:53:05 crc kubenswrapper[4912]: E1208 21:53:05.333941 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="extract-content" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.333947 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="extract-content" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.334130 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="d18baec2-2dd0-476a-80f8-837cb09d6f99" containerName="registry-server" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.335512 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.367879 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.486384 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9796\" (UniqueName: \"kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.486510 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.486537 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.588559 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.588611 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.588739 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9796\" (UniqueName: \"kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.589190 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.589494 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.612988 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-v9796\" (UniqueName: \"kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796\") pod \"redhat-operators-987c7\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:05 crc kubenswrapper[4912]: I1208 21:53:05.684117 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:06 crc kubenswrapper[4912]: I1208 21:53:06.187668 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:06 crc kubenswrapper[4912]: I1208 21:53:06.531770 4912 generic.go:334] "Generic (PLEG): container finished" podID="048a1f70-7799-4342-8a27-69a49c7987ee" containerID="27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618" exitCode=0 Dec 08 21:53:06 crc kubenswrapper[4912]: I1208 21:53:06.532783 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerDied","Data":"27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618"} Dec 08 21:53:06 crc kubenswrapper[4912]: I1208 21:53:06.533009 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerStarted","Data":"d8809369f438df4e51d443842e4724dc471864a6e6a3a984fed7a740df14d5e2"} Dec 08 21:53:07 crc kubenswrapper[4912]: I1208 21:53:07.542823 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerStarted","Data":"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320"} Dec 08 21:53:12 crc kubenswrapper[4912]: I1208 21:53:12.587745 4912 generic.go:334] "Generic (PLEG): container finished" podID="048a1f70-7799-4342-8a27-69a49c7987ee" containerID="de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320" exitCode=0 Dec 08 21:53:12 crc kubenswrapper[4912]: I1208 21:53:12.587799 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerDied","Data":"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320"} Dec 08 21:53:13 crc kubenswrapper[4912]: I1208 21:53:13.600541 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerStarted","Data":"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db"} Dec 08 21:53:13 crc kubenswrapper[4912]: I1208 21:53:13.627898 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-987c7" podStartSLOduration=2.124570732 podStartE2EDuration="8.627842199s" podCreationTimestamp="2025-12-08 21:53:05 +0000 UTC" firstStartedPulling="2025-12-08 21:53:06.533687412 +0000 UTC m=+2068.396689485" lastFinishedPulling="2025-12-08 21:53:13.036958869 +0000 UTC m=+2074.899960952" observedRunningTime="2025-12-08 21:53:13.621412836 +0000 UTC m=+2075.484414929" watchObservedRunningTime="2025-12-08 21:53:13.627842199 +0000 UTC m=+2075.490844282" Dec 08 21:53:15 crc kubenswrapper[4912]: I1208 21:53:15.427694 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1" Dec 08 
21:53:15 crc kubenswrapper[4912]: I1208 21:53:15.685434 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:15 crc kubenswrapper[4912]: I1208 21:53:15.685758 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:16 crc kubenswrapper[4912]: I1208 21:53:16.626766 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"} Dec 08 21:53:16 crc kubenswrapper[4912]: I1208 21:53:16.627047 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:53:16 crc kubenswrapper[4912]: I1208 21:53:16.741353 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-987c7" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="registry-server" probeResult="failure" output=< Dec 08 21:53:16 crc kubenswrapper[4912]: timeout: failed to connect service ":50051" within 1s Dec 08 21:53:16 crc kubenswrapper[4912]: > Dec 08 21:53:25 crc kubenswrapper[4912]: I1208 21:53:25.254765 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:53:25 crc kubenswrapper[4912]: I1208 21:53:25.730521 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:25 crc kubenswrapper[4912]: I1208 21:53:25.784734 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:25 crc kubenswrapper[4912]: I1208 21:53:25.966471 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:26 crc kubenswrapper[4912]: I1208 21:53:26.029844 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-tzgbw"] Dec 08 21:53:26 crc kubenswrapper[4912]: I1208 21:53:26.037830 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-tzgbw"] Dec 08 21:53:26 crc kubenswrapper[4912]: I1208 21:53:26.439266 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ed2e5a0-8170-47d1-99be-0d6f5cd047bf" path="/var/lib/kubelet/pods/6ed2e5a0-8170-47d1-99be-0d6f5cd047bf/volumes" Dec 08 21:53:27 crc kubenswrapper[4912]: I1208 21:53:27.716785 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-987c7" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="registry-server" containerID="cri-o://682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db" gracePeriod=2 Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.167697 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.226686 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities\") pod \"048a1f70-7799-4342-8a27-69a49c7987ee\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.226978 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9796\" (UniqueName: \"kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796\") pod \"048a1f70-7799-4342-8a27-69a49c7987ee\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.227098 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content\") pod \"048a1f70-7799-4342-8a27-69a49c7987ee\" (UID: \"048a1f70-7799-4342-8a27-69a49c7987ee\") " Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.233203 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities" (OuterVolumeSpecName: "utilities") pod "048a1f70-7799-4342-8a27-69a49c7987ee" (UID: "048a1f70-7799-4342-8a27-69a49c7987ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.234594 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796" (OuterVolumeSpecName: "kube-api-access-v9796") pod "048a1f70-7799-4342-8a27-69a49c7987ee" (UID: "048a1f70-7799-4342-8a27-69a49c7987ee"). InnerVolumeSpecName "kube-api-access-v9796". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.333265 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.333306 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9796\" (UniqueName: \"kubernetes.io/projected/048a1f70-7799-4342-8a27-69a49c7987ee-kube-api-access-v9796\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.345542 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "048a1f70-7799-4342-8a27-69a49c7987ee" (UID: "048a1f70-7799-4342-8a27-69a49c7987ee"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.435324 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048a1f70-7799-4342-8a27-69a49c7987ee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.726234 4912 generic.go:334] "Generic (PLEG): container finished" podID="048a1f70-7799-4342-8a27-69a49c7987ee" containerID="682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db" exitCode=0 Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.726293 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-987c7" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.726337 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerDied","Data":"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db"} Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.726678 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-987c7" event={"ID":"048a1f70-7799-4342-8a27-69a49c7987ee","Type":"ContainerDied","Data":"d8809369f438df4e51d443842e4724dc471864a6e6a3a984fed7a740df14d5e2"} Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.726699 4912 scope.go:117] "RemoveContainer" containerID="682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.761549 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.764055 4912 scope.go:117] "RemoveContainer" containerID="de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.773698 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-987c7"] Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.795912 4912 scope.go:117] "RemoveContainer" containerID="27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.838686 4912 scope.go:117] "RemoveContainer" containerID="682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db" Dec 08 21:53:28 crc kubenswrapper[4912]: E1208 21:53:28.839304 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db\": container with ID starting with 682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db not found: ID does not exist" containerID="682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db" Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.839371 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db"} err="failed to get container status \"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db\": rpc error: code = NotFound desc = could not find container \"682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db\": container with ID starting with 682e22db21188eb41ec602005ba6f8264b2deb465318b09c4dcd5eac4d23c3db not found: ID does not exist" Dec 08 21:53:28 crc 
Dec 08 21:53:28 crc kubenswrapper[4912]: E1208 21:53:28.839904 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320\": container with ID starting with de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320 not found: ID does not exist" containerID="de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320"
Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.839936 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320"} err="failed to get container status \"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320\": rpc error: code = NotFound desc = could not find container \"de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320\": container with ID starting with de45ec56767e180e8de684ddfaea95b2e5bae0a04d1b687e162df29ac37e5320 not found: ID does not exist"
Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.839959 4912 scope.go:117] "RemoveContainer" containerID="27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618"
Dec 08 21:53:28 crc kubenswrapper[4912]: E1208 21:53:28.845633 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618\": container with ID starting with 27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618 not found: ID does not exist" containerID="27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618"
Dec 08 21:53:28 crc kubenswrapper[4912]: I1208 21:53:28.845707 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618"} err="failed to get container status \"27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618\": rpc error: code = NotFound desc = could not find container \"27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618\": container with ID starting with 27d25c756d34dfe0adceba1b7608505e728e8e8ad1a1a979ae5a51ae28e1c618 not found: ID does not exist"
Dec 08 21:53:30 crc kubenswrapper[4912]: I1208 21:53:30.063605 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pps7s"]
Dec 08 21:53:30 crc kubenswrapper[4912]: I1208 21:53:30.075881 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pps7s"]
Dec 08 21:53:30 crc kubenswrapper[4912]: I1208 21:53:30.437976 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" path="/var/lib/kubelet/pods/048a1f70-7799-4342-8a27-69a49c7987ee/volumes"
Dec 08 21:53:30 crc kubenswrapper[4912]: I1208 21:53:30.439111 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a847f16f-0bee-43a2-ba39-28c7a0900a93" path="/var/lib/kubelet/pods/a847f16f-0bee-43a2-ba39-28c7a0900a93/volumes"
Dec 08 21:53:44 crc kubenswrapper[4912]: I1208 21:53:44.712160 4912 scope.go:117] "RemoveContainer" containerID="1e97ccf49707cce920e248d3ec4c378fef0738c73e5b7434ec4b4d8bef882017"
Dec 08 21:53:44 crc kubenswrapper[4912]: I1208 21:53:44.764672 4912 scope.go:117] "RemoveContainer" containerID="3324ce8add76f2989b9922b5a7486a75ff1be3230bb5ceaffc867c62e42cbd17"
Dec 08 21:53:44 crc kubenswrapper[4912]: I1208 21:53:44.819684 4912 scope.go:117] "RemoveContainer" containerID="5b0e139b3bf78d4ff3a38df60b465ab79ec8c1e37f806d1152983d10efb82f91"
Dec 08 21:54:02 crc kubenswrapper[4912]: I1208 21:54:02.965494 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 21:54:02 crc kubenswrapper[4912]: I1208 21:54:02.966080 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 21:54:11 crc kubenswrapper[4912]: I1208 21:54:11.039584 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjjp"]
Dec 08 21:54:11 crc kubenswrapper[4912]: I1208 21:54:11.046975 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjjp"]
Dec 08 21:54:12 crc kubenswrapper[4912]: I1208 21:54:12.439872 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb72a504-41c4-43a7-8875-32807e8d27c0" path="/var/lib/kubelet/pods/eb72a504-41c4-43a7-8875-32807e8d27c0/volumes"
Dec 08 21:54:32 crc kubenswrapper[4912]: I1208 21:54:32.965210 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 21:54:32 crc kubenswrapper[4912]: I1208 21:54:32.965865 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 21:54:44 crc kubenswrapper[4912]: I1208 21:54:44.940809 4912 scope.go:117] "RemoveContainer" containerID="2029e01ae50661e9fe1df3a0fb3189a6f138c82f4dd4015664c6289ced89aa0c"
Dec 08 21:55:02 crc kubenswrapper[4912]: I1208 21:55:02.966398 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 21:55:02 crc kubenswrapper[4912]: I1208 21:55:02.967242 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 21:55:02 crc kubenswrapper[4912]: I1208 21:55:02.967297 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4"
Dec 08 21:55:02 crc kubenswrapper[4912]: I1208 21:55:02.967957 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 08 21:55:02 crc kubenswrapper[4912]: I1208 21:55:02.968020 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651" gracePeriod=600
Dec 08 21:55:03 crc kubenswrapper[4912]: I1208 21:55:03.608855 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651" exitCode=0
Dec 08 21:55:03 crc kubenswrapper[4912]: I1208 21:55:03.608956 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651"}
Dec 08 21:55:03 crc kubenswrapper[4912]: I1208 21:55:03.609562 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7"}
Dec 08 21:55:03 crc kubenswrapper[4912]: I1208 21:55:03.609603 4912 scope.go:117] "RemoveContainer" containerID="ca5bf41cea89fade5c3f935d69eb20eb052abc41d3fc1bc0e373bdf09ab1b5d3"
Dec 08 21:55:46 crc kubenswrapper[4912]: I1208 21:55:46.983671 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" exitCode=1
Dec 08 21:55:46 crc kubenswrapper[4912]: I1208 21:55:46.983780 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"}
Dec 08 21:55:46 crc kubenswrapper[4912]: I1208 21:55:46.984335 4912 scope.go:117] "RemoveContainer" containerID="92e18995603c63d0c9bffc4b4ec6c9b46c8efa5a8645b411ec59ec43013168c1"
Dec 08 21:55:46 crc kubenswrapper[4912]: I1208 21:55:46.987820 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"
Dec 08 21:55:46 crc kubenswrapper[4912]: E1208 21:55:46.988485 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.100738 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"]
Dec 08 21:55:47 crc kubenswrapper[4912]: E1208 21:55:47.101222 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="extract-utilities"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.101245 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="extract-utilities"
Dec 08 21:55:47 crc kubenswrapper[4912]: E1208 21:55:47.101257 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="registry-server"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.101265 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="registry-server"
Dec 08 21:55:47 crc kubenswrapper[4912]: E1208 21:55:47.101287 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="extract-content"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.101297 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="extract-content"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.101519 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="048a1f70-7799-4342-8a27-69a49c7987ee" containerName="registry-server"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.103294 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.109923 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"]
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.208317 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsjg6\" (UniqueName: \"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.208561 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.208761 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.311165 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.311322 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsjg6\" (UniqueName: \"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz"
\"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.311418 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.311843 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.311882 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.335384 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsjg6\" (UniqueName: \"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") pod \"certified-operators-jqwmz\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") " pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.460589 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqwmz" Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.947107 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"] Dec 08 21:55:47 crc kubenswrapper[4912]: I1208 21:55:47.996785 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerStarted","Data":"545cada13b89b9759e48a65cd11038d679a1b63f54129a2ee0ac249b5180dddc"} Dec 08 21:55:49 crc kubenswrapper[4912]: I1208 21:55:49.010808 4912 generic.go:334] "Generic (PLEG): container finished" podID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerID="83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0" exitCode=0 Dec 08 21:55:49 crc kubenswrapper[4912]: I1208 21:55:49.010931 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerDied","Data":"83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0"} Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.284392 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"] Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.286721 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.294120 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"] Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.375083 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.375197 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc6vg\" (UniqueName: \"kubernetes.io/projected/8cf5f052-8cda-49c9-87ad-d80986f57fc8-kube-api-access-cc6vg\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.375239 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.477440 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.477535 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc6vg\" (UniqueName: \"kubernetes.io/projected/8cf5f052-8cda-49c9-87ad-d80986f57fc8-kube-api-access-cc6vg\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.477589 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.477979 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.478123 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities\") pod \"redhat-marketplace-7k24m\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") " pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.504679 4912 operation_generator.go:637] "MountVolume.SetUp 
Dec 08 21:55:50 crc kubenswrapper[4912]: I1208 21:55:50.604761 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:55:51 crc kubenswrapper[4912]: I1208 21:55:51.029545 4912 generic.go:334] "Generic (PLEG): container finished" podID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerID="2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86" exitCode=0
Dec 08 21:55:51 crc kubenswrapper[4912]: I1208 21:55:51.029629 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerDied","Data":"2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86"}
Dec 08 21:55:51 crc kubenswrapper[4912]: I1208 21:55:51.076341 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"]
Dec 08 21:55:52 crc kubenswrapper[4912]: I1208 21:55:52.041505 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerStarted","Data":"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"}
Dec 08 21:55:52 crc kubenswrapper[4912]: I1208 21:55:52.045875 4912 generic.go:334] "Generic (PLEG): container finished" podID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerID="b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079" exitCode=0
Dec 08 21:55:52 crc kubenswrapper[4912]: I1208 21:55:52.045916 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerDied","Data":"b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079"}
Dec 08 21:55:52 crc kubenswrapper[4912]: I1208 21:55:52.045938 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerStarted","Data":"52e3294a84cbe1ffbdb7c16d796b8aa30bd78bda7a0af989027bffa31e834b5c"}
Dec 08 21:55:52 crc kubenswrapper[4912]: I1208 21:55:52.059968 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jqwmz" podStartSLOduration=2.495760888 podStartE2EDuration="5.059950544s" podCreationTimestamp="2025-12-08 21:55:47 +0000 UTC" firstStartedPulling="2025-12-08 21:55:49.014526717 +0000 UTC m=+2230.877528790" lastFinishedPulling="2025-12-08 21:55:51.578716363 +0000 UTC m=+2233.441718446" observedRunningTime="2025-12-08 21:55:52.056788624 +0000 UTC m=+2233.919790707" watchObservedRunningTime="2025-12-08 21:55:52.059950544 +0000 UTC m=+2233.922952627"
Dec 08 21:55:53 crc kubenswrapper[4912]: I1208 21:55:53.056445 4912 generic.go:334] "Generic (PLEG): container finished" podID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerID="b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3" exitCode=0
Dec 08 21:55:53 crc kubenswrapper[4912]: I1208 21:55:53.056499 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerDied","Data":"b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3"}
Dec 08 21:55:54 crc kubenswrapper[4912]: I1208 21:55:54.069453 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerStarted","Data":"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1"}
Dec 08 21:55:54 crc kubenswrapper[4912]: I1208 21:55:54.100283 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7k24m" podStartSLOduration=2.650935734 podStartE2EDuration="4.10026395s" podCreationTimestamp="2025-12-08 21:55:50 +0000 UTC" firstStartedPulling="2025-12-08 21:55:52.047402317 +0000 UTC m=+2233.910404400" lastFinishedPulling="2025-12-08 21:55:53.496730533 +0000 UTC m=+2235.359732616" observedRunningTime="2025-12-08 21:55:54.094274408 +0000 UTC m=+2235.957276491" watchObservedRunningTime="2025-12-08 21:55:54.10026395 +0000 UTC m=+2235.963266033"
Dec 08 21:55:55 crc kubenswrapper[4912]: I1208 21:55:55.249282 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 21:55:55 crc kubenswrapper[4912]: I1208 21:55:55.250551 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"
Dec 08 21:55:55 crc kubenswrapper[4912]: E1208 21:55:55.250832 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:55:57 crc kubenswrapper[4912]: I1208 21:55:57.461068 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:57 crc kubenswrapper[4912]: I1208 21:55:57.461394 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:57 crc kubenswrapper[4912]: I1208 21:55:57.514949 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:55:58 crc kubenswrapper[4912]: I1208 21:55:58.151876 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:56:00 crc kubenswrapper[4912]: I1208 21:56:00.605737 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:56:00 crc kubenswrapper[4912]: I1208 21:56:00.606188 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:56:00 crc kubenswrapper[4912]: I1208 21:56:00.653587 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:56:01 crc kubenswrapper[4912]: I1208 21:56:01.172299 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:56:02 crc kubenswrapper[4912]: I1208 21:56:02.076266 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"]
Dec 08 21:56:02 crc kubenswrapper[4912]: I1208 21:56:02.076730 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jqwmz" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="registry-server" containerID="cri-o://cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7" gracePeriod=2
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.041124 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.083924 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"]
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.128549 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content\") pod \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.128596 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsjg6\" (UniqueName: \"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") pod \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.128676 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities\") pod \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\" (UID: \"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.129974 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities" (OuterVolumeSpecName: "utilities") pod "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" (UID: "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.134824 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6" (OuterVolumeSpecName: "kube-api-access-nsjg6") pod "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" (UID: "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea"). InnerVolumeSpecName "kube-api-access-nsjg6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.147906 4912 generic.go:334] "Generic (PLEG): container finished" podID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerID="cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7" exitCode=0
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.148007 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqwmz"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.148006 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerDied","Data":"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"}
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.148084 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqwmz" event={"ID":"ebd6c44b-2f4c-48a0-bb62-05ecab0273ea","Type":"ContainerDied","Data":"545cada13b89b9759e48a65cd11038d679a1b63f54129a2ee0ac249b5180dddc"}
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.148106 4912 scope.go:117] "RemoveContainer" containerID="cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.148151 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7k24m" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="registry-server" containerID="cri-o://df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1" gracePeriod=2
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.179629 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" (UID: "ebd6c44b-2f4c-48a0-bb62-05ecab0273ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.190794 4912 scope.go:117] "RemoveContainer" containerID="2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.212654 4912 scope.go:117] "RemoveContainer" containerID="83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.231719 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-utilities\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.231748 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.231760 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsjg6\" (UniqueName: \"kubernetes.io/projected/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea-kube-api-access-nsjg6\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.249629 4912 scope.go:117] "RemoveContainer" containerID="cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"
Dec 08 21:56:03 crc kubenswrapper[4912]: E1208 21:56:03.250011 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7\": container with ID starting with cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7 not found: ID does not exist" containerID="cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.250057 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7"} err="failed to get container status \"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7\": rpc error: code = NotFound desc = could not find container \"cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7\": container with ID starting with cc4e9a9e32754017223bd2901b0f9b0a7fd522ed716751da57f019dda1c5afe7 not found: ID does not exist"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.250080 4912 scope.go:117] "RemoveContainer" containerID="2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86"
Dec 08 21:56:03 crc kubenswrapper[4912]: E1208 21:56:03.250371 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86\": container with ID starting with 2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86 not found: ID does not exist" containerID="2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.250400 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86"} err="failed to get container status \"2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86\": rpc error: code = NotFound desc = could not find container \"2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86\": container with ID starting with 2433453e8c5836404fc6748c638b688e7853ced46b771147d27683522788fa86 not found: ID does not exist"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.250415 4912 scope.go:117] "RemoveContainer" containerID="83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0"
Dec 08 21:56:03 crc kubenswrapper[4912]: E1208 21:56:03.250665 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0\": container with ID starting with 83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0 not found: ID does not exist" containerID="83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.250709 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0"} err="failed to get container status \"83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0\": rpc error: code = NotFound desc = could not find container \"83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0\": container with ID starting with 83877230ac540bd9dc07f95c3ac608f80b79422323eede1fe407c08a60972ad0 not found: ID does not exist"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.490108 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"]
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.496823 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jqwmz"]
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.617093 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7k24m"
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.747403 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content\") pod \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.747494 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities\") pod \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.747551 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc6vg\" (UniqueName: \"kubernetes.io/projected/8cf5f052-8cda-49c9-87ad-d80986f57fc8-kube-api-access-cc6vg\") pod \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\" (UID: \"8cf5f052-8cda-49c9-87ad-d80986f57fc8\") "
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.748920 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities" (OuterVolumeSpecName: "utilities") pod "8cf5f052-8cda-49c9-87ad-d80986f57fc8" (UID: "8cf5f052-8cda-49c9-87ad-d80986f57fc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.751837 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf5f052-8cda-49c9-87ad-d80986f57fc8-kube-api-access-cc6vg" (OuterVolumeSpecName: "kube-api-access-cc6vg") pod "8cf5f052-8cda-49c9-87ad-d80986f57fc8" (UID: "8cf5f052-8cda-49c9-87ad-d80986f57fc8"). InnerVolumeSpecName "kube-api-access-cc6vg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.769031 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cf5f052-8cda-49c9-87ad-d80986f57fc8" (UID: "8cf5f052-8cda-49c9-87ad-d80986f57fc8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.850575 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc6vg\" (UniqueName: \"kubernetes.io/projected/8cf5f052-8cda-49c9-87ad-d80986f57fc8-kube-api-access-cc6vg\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.850630 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:03 crc kubenswrapper[4912]: I1208 21:56:03.850647 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cf5f052-8cda-49c9-87ad-d80986f57fc8-utilities\") on node \"crc\" DevicePath \"\""
Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.162667 4912 generic.go:334] "Generic (PLEG): container finished" podID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerID="df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1" exitCode=0
Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.162716 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerDied","Data":"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1"}
Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.162750 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7k24m" event={"ID":"8cf5f052-8cda-49c9-87ad-d80986f57fc8","Type":"ContainerDied","Data":"52e3294a84cbe1ffbdb7c16d796b8aa30bd78bda7a0af989027bffa31e834b5c"}
Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.162773 4912 scope.go:117] "RemoveContainer" containerID="df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1"
Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.163237 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7k24m" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.205205 4912 scope.go:117] "RemoveContainer" containerID="b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.206903 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"] Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.221024 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7k24m"] Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.233514 4912 scope.go:117] "RemoveContainer" containerID="b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.269513 4912 scope.go:117] "RemoveContainer" containerID="df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1" Dec 08 21:56:04 crc kubenswrapper[4912]: E1208 21:56:04.270120 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1\": container with ID starting with df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1 not found: ID does not exist" containerID="df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.270174 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1"} err="failed to get container status \"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1\": rpc error: code = NotFound desc = could not find container \"df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1\": container with ID starting with df0677a101ab64c8add711dbfa3a87d368799894504aac044cdbbd3ed10ecef1 not found: ID does not exist" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.270212 4912 scope.go:117] "RemoveContainer" containerID="b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3" Dec 08 21:56:04 crc kubenswrapper[4912]: E1208 21:56:04.270669 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3\": container with ID starting with b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3 not found: ID does not exist" containerID="b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.270729 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3"} err="failed to get container status \"b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3\": rpc error: code = NotFound desc = could not find container \"b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3\": container with ID starting with b76b963493e40fa4c03acb6ca9f6d578deaf802f04cad503e2aeb374979853d3 not found: ID does not exist" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.270761 4912 scope.go:117] "RemoveContainer" containerID="b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079" Dec 08 21:56:04 crc kubenswrapper[4912]: E1208 21:56:04.271126 4912 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079\": container with ID starting with b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079 not found: ID does not exist" containerID="b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.271290 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079"} err="failed to get container status \"b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079\": rpc error: code = NotFound desc = could not find container \"b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079\": container with ID starting with b62c4132d733766b99d27373f6f9925d4a48db372b2ddecd64e25069eec16079 not found: ID does not exist" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.441824 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" path="/var/lib/kubelet/pods/8cf5f052-8cda-49c9-87ad-d80986f57fc8/volumes" Dec 08 21:56:04 crc kubenswrapper[4912]: I1208 21:56:04.442772 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" path="/var/lib/kubelet/pods/ebd6c44b-2f4c-48a0-bb62-05ecab0273ea/volumes" Dec 08 21:56:05 crc kubenswrapper[4912]: I1208 21:56:05.249358 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:56:05 crc kubenswrapper[4912]: I1208 21:56:05.249848 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:56:05 crc kubenswrapper[4912]: E1208 21:56:05.250086 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:56:17 crc kubenswrapper[4912]: I1208 21:56:17.427987 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:56:17 crc kubenswrapper[4912]: E1208 21:56:17.428811 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:56:28 crc kubenswrapper[4912]: I1208 21:56:28.434983 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:56:28 crc kubenswrapper[4912]: E1208 21:56:28.435893 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:56:41 crc kubenswrapper[4912]: I1208 21:56:41.428297 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:56:41 crc kubenswrapper[4912]: E1208 21:56:41.429073 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:56:55 crc kubenswrapper[4912]: I1208 21:56:55.428773 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:56:55 crc kubenswrapper[4912]: E1208 21:56:55.429718 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:57:08 crc kubenswrapper[4912]: I1208 21:57:08.434816 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:57:08 crc kubenswrapper[4912]: E1208 21:57:08.435651 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:57:20 crc kubenswrapper[4912]: I1208 21:57:20.428253 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:57:20 crc kubenswrapper[4912]: E1208 21:57:20.429109 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:57:31 crc kubenswrapper[4912]: I1208 21:57:31.428878 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:57:31 crc kubenswrapper[4912]: E1208 21:57:31.430233 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:57:32 crc kubenswrapper[4912]: I1208 21:57:32.965605 4912 
Dec 08 21:57:46 crc kubenswrapper[4912]: I1208 21:57:46.428985 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"
Dec 08 21:57:46 crc kubenswrapper[4912]: E1208 21:57:46.429949 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:58:00 crc kubenswrapper[4912]: I1208 21:58:00.427579 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"
Dec 08 21:58:00 crc kubenswrapper[4912]: E1208 21:58:00.428444 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:58:02 crc kubenswrapper[4912]: I1208 21:58:02.965667 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 21:58:02 crc kubenswrapper[4912]: I1208 21:58:02.966112 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 21:58:14 crc kubenswrapper[4912]: I1208 21:58:14.431656 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b"
Dec 08 21:58:14 crc kubenswrapper[4912]: E1208 21:58:14.432720 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 21:58:26 crc kubenswrapper[4912]: I1208 21:58:26.428725 4912 scope.go:117] "RemoveContainer"
containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:58:26 crc kubenswrapper[4912]: E1208 21:58:26.429587 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 21:58:32 crc kubenswrapper[4912]: I1208 21:58:32.965110 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 21:58:32 crc kubenswrapper[4912]: I1208 21:58:32.965639 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 21:58:32 crc kubenswrapper[4912]: I1208 21:58:32.965688 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 21:58:32 crc kubenswrapper[4912]: I1208 21:58:32.966460 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 21:58:32 crc kubenswrapper[4912]: I1208 21:58:32.966518 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" gracePeriod=600 Dec 08 21:58:33 crc kubenswrapper[4912]: E1208 21:58:33.091258 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:58:33 crc kubenswrapper[4912]: I1208 21:58:33.706416 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" exitCode=0 Dec 08 21:58:33 crc kubenswrapper[4912]: I1208 21:58:33.706465 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7"} Dec 08 21:58:33 crc kubenswrapper[4912]: I1208 21:58:33.706501 4912 scope.go:117] "RemoveContainer" 
containerID="396b55f109f7b5abd3be56a09199c2ddc4c83c80d58fdee522e52231482ed651" Dec 08 21:58:33 crc kubenswrapper[4912]: I1208 21:58:33.707195 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:58:33 crc kubenswrapper[4912]: E1208 21:58:33.707457 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:58:39 crc kubenswrapper[4912]: I1208 21:58:39.428836 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 21:58:39 crc kubenswrapper[4912]: I1208 21:58:39.763886 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573"} Dec 08 21:58:39 crc kubenswrapper[4912]: I1208 21:58:39.764322 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:58:45 crc kubenswrapper[4912]: I1208 21:58:45.251057 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 21:58:46 crc kubenswrapper[4912]: I1208 21:58:46.431101 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:58:46 crc kubenswrapper[4912]: E1208 21:58:46.431607 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:59:01 crc kubenswrapper[4912]: I1208 21:59:01.428222 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:59:01 crc kubenswrapper[4912]: E1208 21:59:01.428959 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:59:16 crc kubenswrapper[4912]: I1208 21:59:16.429067 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:59:16 crc kubenswrapper[4912]: E1208 21:59:16.429803 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:59:29 crc kubenswrapper[4912]: I1208 21:59:29.427800 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:59:29 crc kubenswrapper[4912]: E1208 21:59:29.429807 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:59:44 crc kubenswrapper[4912]: I1208 21:59:44.428007 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:59:44 crc kubenswrapper[4912]: E1208 21:59:44.429252 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 21:59:55 crc kubenswrapper[4912]: I1208 21:59:55.428724 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 21:59:55 crc kubenswrapper[4912]: E1208 21:59:55.429526 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.146471 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842"] Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147738 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147771 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147799 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147808 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147835 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147845 4912 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147855 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147861 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="extract-utilities" Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147876 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147883 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4912]: E1208 22:00:00.147902 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.147909 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="extract-content" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.150693 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd6c44b-2f4c-48a0-bb62-05ecab0273ea" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.150740 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf5f052-8cda-49c9-87ad-d80986f57fc8" containerName="registry-server" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.151660 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.154521 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.154787 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.173599 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842"] Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.328693 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.329426 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf5k8\" (UniqueName: \"kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.329530 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.431498 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf5k8\" (UniqueName: \"kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.431575 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.433157 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.434211 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume\") pod 
\"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.443637 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.449620 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf5k8\" (UniqueName: \"kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8\") pod \"collect-profiles-29420520-zm842\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.543676 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:00 crc kubenswrapper[4912]: I1208 22:00:00.998770 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842"] Dec 08 22:00:01 crc kubenswrapper[4912]: I1208 22:00:01.556238 4912 generic.go:334] "Generic (PLEG): container finished" podID="0935d643-3ed9-4abe-b56d-8cfcebbf71a0" containerID="137afa6ff79206a7fad6b892b388eb43c0a12199da8683d3d3064a8e030921e9" exitCode=0 Dec 08 22:00:01 crc kubenswrapper[4912]: I1208 22:00:01.556656 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" event={"ID":"0935d643-3ed9-4abe-b56d-8cfcebbf71a0","Type":"ContainerDied","Data":"137afa6ff79206a7fad6b892b388eb43c0a12199da8683d3d3064a8e030921e9"} Dec 08 22:00:01 crc kubenswrapper[4912]: I1208 22:00:01.556689 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" event={"ID":"0935d643-3ed9-4abe-b56d-8cfcebbf71a0","Type":"ContainerStarted","Data":"a9f115c67668eacdb667887eec9f8ef2bd7eca21ed3f63eeff3b3ebe02548a65"} Dec 08 22:00:02 crc kubenswrapper[4912]: I1208 22:00:02.881316 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.077506 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume\") pod \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.077666 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf5k8\" (UniqueName: \"kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8\") pod \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.077732 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume\") pod \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\" (UID: \"0935d643-3ed9-4abe-b56d-8cfcebbf71a0\") " Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.078599 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume" (OuterVolumeSpecName: "config-volume") pod "0935d643-3ed9-4abe-b56d-8cfcebbf71a0" (UID: "0935d643-3ed9-4abe-b56d-8cfcebbf71a0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.083341 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0935d643-3ed9-4abe-b56d-8cfcebbf71a0" (UID: "0935d643-3ed9-4abe-b56d-8cfcebbf71a0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.084470 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8" (OuterVolumeSpecName: "kube-api-access-jf5k8") pod "0935d643-3ed9-4abe-b56d-8cfcebbf71a0" (UID: "0935d643-3ed9-4abe-b56d-8cfcebbf71a0"). InnerVolumeSpecName "kube-api-access-jf5k8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.180160 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf5k8\" (UniqueName: \"kubernetes.io/projected/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-kube-api-access-jf5k8\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.180508 4912 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.180520 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0935d643-3ed9-4abe-b56d-8cfcebbf71a0-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.572242 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" event={"ID":"0935d643-3ed9-4abe-b56d-8cfcebbf71a0","Type":"ContainerDied","Data":"a9f115c67668eacdb667887eec9f8ef2bd7eca21ed3f63eeff3b3ebe02548a65"} Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.572296 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9f115c67668eacdb667887eec9f8ef2bd7eca21ed3f63eeff3b3ebe02548a65" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.572541 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420520-zm842" Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.968910 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh"] Dec 08 22:00:03 crc kubenswrapper[4912]: I1208 22:00:03.975642 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420475-4g5jh"] Dec 08 22:00:04 crc kubenswrapper[4912]: I1208 22:00:04.440480 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1259f14f-f93a-4765-9f93-ac4af158951e" path="/var/lib/kubelet/pods/1259f14f-f93a-4765-9f93-ac4af158951e/volumes" Dec 08 22:00:07 crc kubenswrapper[4912]: I1208 22:00:07.427754 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:00:07 crc kubenswrapper[4912]: E1208 22:00:07.428476 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:00:21 crc kubenswrapper[4912]: I1208 22:00:21.428207 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:00:21 crc kubenswrapper[4912]: E1208 22:00:21.428900 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:00:35 crc kubenswrapper[4912]: I1208 22:00:35.428331 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:00:35 crc kubenswrapper[4912]: E1208 22:00:35.429020 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:00:45 crc kubenswrapper[4912]: I1208 22:00:45.163286 4912 scope.go:117] "RemoveContainer" containerID="2ad250d2f2c63eb441a1c978152f4a9a62ebb307b39ac3fdd52cca03e61bb421" Dec 08 22:00:50 crc kubenswrapper[4912]: I1208 22:00:50.427649 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:00:50 crc kubenswrapper[4912]: E1208 22:00:50.429144 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.151073 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29420521-kmpmr"] Dec 08 22:01:00 crc kubenswrapper[4912]: E1208 22:01:00.151924 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0935d643-3ed9-4abe-b56d-8cfcebbf71a0" containerName="collect-profiles" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.151938 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="0935d643-3ed9-4abe-b56d-8cfcebbf71a0" containerName="collect-profiles" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.152146 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="0935d643-3ed9-4abe-b56d-8cfcebbf71a0" containerName="collect-profiles" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.152920 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.163978 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29420521-kmpmr"] Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.314276 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.314351 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.314419 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xfj2\" (UniqueName: \"kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.314476 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.416460 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.416564 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.416622 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xfj2\" (UniqueName: \"kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.416686 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.422168 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.422137 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.423309 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.435219 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xfj2\" (UniqueName: \"kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2\") pod \"keystone-cron-29420521-kmpmr\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.527535 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:00 crc kubenswrapper[4912]: I1208 22:01:00.948205 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29420521-kmpmr"] Dec 08 22:01:01 crc kubenswrapper[4912]: I1208 22:01:01.136913 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-kmpmr" event={"ID":"46da716d-9ea1-452b-b59b-48609b5ad68a","Type":"ContainerStarted","Data":"4c2d91c518f320efdbe081319fcbc92a6f1118d19537bccbb07e4a2f70f3e242"} Dec 08 22:01:01 crc kubenswrapper[4912]: I1208 22:01:01.137246 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-kmpmr" event={"ID":"46da716d-9ea1-452b-b59b-48609b5ad68a","Type":"ContainerStarted","Data":"6f8641923bb80d4c3f46caed2e22a15c0ff08cd94b018e4b0e1f63f0dee6115d"} Dec 08 22:01:01 crc kubenswrapper[4912]: I1208 22:01:01.157228 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29420521-kmpmr" podStartSLOduration=1.157186272 podStartE2EDuration="1.157186272s" podCreationTimestamp="2025-12-08 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 22:01:01.152597558 +0000 UTC m=+2543.015599641" watchObservedRunningTime="2025-12-08 22:01:01.157186272 +0000 UTC m=+2543.020188355" Dec 08 22:01:01 crc kubenswrapper[4912]: I1208 22:01:01.429073 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:01:01 crc kubenswrapper[4912]: E1208 22:01:01.429269 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:01:04 crc kubenswrapper[4912]: I1208 22:01:04.181208 4912 generic.go:334] "Generic (PLEG): container finished" podID="46da716d-9ea1-452b-b59b-48609b5ad68a" containerID="4c2d91c518f320efdbe081319fcbc92a6f1118d19537bccbb07e4a2f70f3e242" exitCode=0 Dec 08 22:01:04 crc kubenswrapper[4912]: I1208 22:01:04.181331 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-kmpmr" event={"ID":"46da716d-9ea1-452b-b59b-48609b5ad68a","Type":"ContainerDied","Data":"4c2d91c518f320efdbe081319fcbc92a6f1118d19537bccbb07e4a2f70f3e242"} Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.544908 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.624144 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys\") pod \"46da716d-9ea1-452b-b59b-48609b5ad68a\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.624205 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data\") pod \"46da716d-9ea1-452b-b59b-48609b5ad68a\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.624258 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle\") pod \"46da716d-9ea1-452b-b59b-48609b5ad68a\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.624293 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xfj2\" (UniqueName: \"kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2\") pod \"46da716d-9ea1-452b-b59b-48609b5ad68a\" (UID: \"46da716d-9ea1-452b-b59b-48609b5ad68a\") " Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.629809 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2" (OuterVolumeSpecName: "kube-api-access-2xfj2") pod "46da716d-9ea1-452b-b59b-48609b5ad68a" (UID: "46da716d-9ea1-452b-b59b-48609b5ad68a"). InnerVolumeSpecName "kube-api-access-2xfj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.630932 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "46da716d-9ea1-452b-b59b-48609b5ad68a" (UID: "46da716d-9ea1-452b-b59b-48609b5ad68a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.653763 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46da716d-9ea1-452b-b59b-48609b5ad68a" (UID: "46da716d-9ea1-452b-b59b-48609b5ad68a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.671536 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data" (OuterVolumeSpecName: "config-data") pod "46da716d-9ea1-452b-b59b-48609b5ad68a" (UID: "46da716d-9ea1-452b-b59b-48609b5ad68a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.726915 4912 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.727123 4912 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-config-data\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.727187 4912 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46da716d-9ea1-452b-b59b-48609b5ad68a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:05 crc kubenswrapper[4912]: I1208 22:01:05.727244 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xfj2\" (UniqueName: \"kubernetes.io/projected/46da716d-9ea1-452b-b59b-48609b5ad68a-kube-api-access-2xfj2\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:06 crc kubenswrapper[4912]: I1208 22:01:06.202575 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29420521-kmpmr" event={"ID":"46da716d-9ea1-452b-b59b-48609b5ad68a","Type":"ContainerDied","Data":"6f8641923bb80d4c3f46caed2e22a15c0ff08cd94b018e4b0e1f63f0dee6115d"} Dec 08 22:01:06 crc kubenswrapper[4912]: I1208 22:01:06.202648 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29420521-kmpmr" Dec 08 22:01:06 crc kubenswrapper[4912]: I1208 22:01:06.202651 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f8641923bb80d4c3f46caed2e22a15c0ff08cd94b018e4b0e1f63f0dee6115d" Dec 08 22:01:11 crc kubenswrapper[4912]: I1208 22:01:11.246417 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" exitCode=1 Dec 08 22:01:11 crc kubenswrapper[4912]: I1208 22:01:11.246496 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573"} Dec 08 22:01:11 crc kubenswrapper[4912]: I1208 22:01:11.247171 4912 scope.go:117] "RemoveContainer" containerID="d22127e3207031d76998eb28360e829188829fe26fa8ec8f188205b31313a59b" Dec 08 22:01:11 crc kubenswrapper[4912]: I1208 22:01:11.248245 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:01:11 crc kubenswrapper[4912]: E1208 22:01:11.248652 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:01:15 crc kubenswrapper[4912]: I1208 22:01:15.249424 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 22:01:15 crc kubenswrapper[4912]: I1208 22:01:15.250508 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:01:15 crc kubenswrapper[4912]: E1208 22:01:15.250793 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:01:16 crc kubenswrapper[4912]: I1208 22:01:16.428238 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:01:16 crc kubenswrapper[4912]: E1208 22:01:16.429831 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:01:16 crc kubenswrapper[4912]: I1208 22:01:16.882137 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:16 crc kubenswrapper[4912]: E1208 22:01:16.882595 4912 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="46da716d-9ea1-452b-b59b-48609b5ad68a" containerName="keystone-cron" Dec 08 22:01:16 crc kubenswrapper[4912]: I1208 22:01:16.882618 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="46da716d-9ea1-452b-b59b-48609b5ad68a" containerName="keystone-cron" Dec 08 22:01:16 crc kubenswrapper[4912]: I1208 22:01:16.882935 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="46da716d-9ea1-452b-b59b-48609b5ad68a" containerName="keystone-cron" Dec 08 22:01:16 crc kubenswrapper[4912]: I1208 22:01:16.884762 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:16.891387 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.040342 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.040389 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.040455 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx5td\" (UniqueName: \"kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.142524 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx5td\" (UniqueName: \"kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.142968 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.143123 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.143532 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities\") pod 
\"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.143565 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.163154 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx5td\" (UniqueName: \"kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td\") pod \"community-operators-68lk2\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.369606 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:17 crc kubenswrapper[4912]: I1208 22:01:17.894905 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:17 crc kubenswrapper[4912]: W1208 22:01:17.908747 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f18b039_7e6f_45f6_940d_9c6585bf5052.slice/crio-d1dc6d33c8d7c966c80a8387c8296c2c662cf7818d71f7ca7b8dbc583c4a94c4 WatchSource:0}: Error finding container d1dc6d33c8d7c966c80a8387c8296c2c662cf7818d71f7ca7b8dbc583c4a94c4: Status 404 returned error can't find the container with id d1dc6d33c8d7c966c80a8387c8296c2c662cf7818d71f7ca7b8dbc583c4a94c4 Dec 08 22:01:18 crc kubenswrapper[4912]: I1208 22:01:18.309896 4912 generic.go:334] "Generic (PLEG): container finished" podID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerID="b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91" exitCode=0 Dec 08 22:01:18 crc kubenswrapper[4912]: I1208 22:01:18.309953 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerDied","Data":"b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91"} Dec 08 22:01:18 crc kubenswrapper[4912]: I1208 22:01:18.309986 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerStarted","Data":"d1dc6d33c8d7c966c80a8387c8296c2c662cf7818d71f7ca7b8dbc583c4a94c4"} Dec 08 22:01:18 crc kubenswrapper[4912]: I1208 22:01:18.312652 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:01:20 crc kubenswrapper[4912]: I1208 22:01:20.327771 4912 generic.go:334] "Generic (PLEG): container finished" podID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerID="a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5" exitCode=0 Dec 08 22:01:20 crc kubenswrapper[4912]: I1208 22:01:20.328631 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerDied","Data":"a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5"} Dec 08 22:01:21 crc kubenswrapper[4912]: I1208 22:01:21.341835 4912 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerStarted","Data":"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0"} Dec 08 22:01:21 crc kubenswrapper[4912]: I1208 22:01:21.373520 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-68lk2" podStartSLOduration=2.92153941 podStartE2EDuration="5.373489035s" podCreationTimestamp="2025-12-08 22:01:16 +0000 UTC" firstStartedPulling="2025-12-08 22:01:18.312303032 +0000 UTC m=+2560.175305125" lastFinishedPulling="2025-12-08 22:01:20.764252667 +0000 UTC m=+2562.627254750" observedRunningTime="2025-12-08 22:01:21.362825701 +0000 UTC m=+2563.225827804" watchObservedRunningTime="2025-12-08 22:01:21.373489035 +0000 UTC m=+2563.236491138" Dec 08 22:01:25 crc kubenswrapper[4912]: I1208 22:01:25.249381 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 22:01:25 crc kubenswrapper[4912]: I1208 22:01:25.250514 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:01:25 crc kubenswrapper[4912]: E1208 22:01:25.250913 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:01:27 crc kubenswrapper[4912]: I1208 22:01:27.370422 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:27 crc kubenswrapper[4912]: I1208 22:01:27.370786 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:27 crc kubenswrapper[4912]: I1208 22:01:27.423464 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:27 crc kubenswrapper[4912]: I1208 22:01:27.472169 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:27 crc kubenswrapper[4912]: I1208 22:01:27.665694 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:28 crc kubenswrapper[4912]: I1208 22:01:28.433691 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:01:28 crc kubenswrapper[4912]: E1208 22:01:28.433959 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:01:29 crc kubenswrapper[4912]: I1208 22:01:29.422528 4912 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-68lk2" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="registry-server" containerID="cri-o://824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0" gracePeriod=2 Dec 08 22:01:29 crc kubenswrapper[4912]: E1208 22:01:29.644596 4912 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f18b039_7e6f_45f6_940d_9c6585bf5052.slice/crio-conmon-824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0.scope\": RecentStats: unable to find data in memory cache]" Dec 08 22:01:29 crc kubenswrapper[4912]: I1208 22:01:29.894132 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.009170 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities\") pod \"7f18b039-7e6f-45f6-940d-9c6585bf5052\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.009355 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wx5td\" (UniqueName: \"kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td\") pod \"7f18b039-7e6f-45f6-940d-9c6585bf5052\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.009384 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content\") pod \"7f18b039-7e6f-45f6-940d-9c6585bf5052\" (UID: \"7f18b039-7e6f-45f6-940d-9c6585bf5052\") " Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.010639 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities" (OuterVolumeSpecName: "utilities") pod "7f18b039-7e6f-45f6-940d-9c6585bf5052" (UID: "7f18b039-7e6f-45f6-940d-9c6585bf5052"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.014599 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td" (OuterVolumeSpecName: "kube-api-access-wx5td") pod "7f18b039-7e6f-45f6-940d-9c6585bf5052" (UID: "7f18b039-7e6f-45f6-940d-9c6585bf5052"). InnerVolumeSpecName "kube-api-access-wx5td". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.062726 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f18b039-7e6f-45f6-940d-9c6585bf5052" (UID: "7f18b039-7e6f-45f6-940d-9c6585bf5052"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.111322 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wx5td\" (UniqueName: \"kubernetes.io/projected/7f18b039-7e6f-45f6-940d-9c6585bf5052-kube-api-access-wx5td\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.111362 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.111374 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f18b039-7e6f-45f6-940d-9c6585bf5052-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.451480 4912 generic.go:334] "Generic (PLEG): container finished" podID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerID="824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0" exitCode=0 Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.451546 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerDied","Data":"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0"} Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.451578 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-68lk2" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.451593 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68lk2" event={"ID":"7f18b039-7e6f-45f6-940d-9c6585bf5052","Type":"ContainerDied","Data":"d1dc6d33c8d7c966c80a8387c8296c2c662cf7818d71f7ca7b8dbc583c4a94c4"} Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.451622 4912 scope.go:117] "RemoveContainer" containerID="824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.478113 4912 scope.go:117] "RemoveContainer" containerID="a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.503647 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.512188 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-68lk2"] Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.513208 4912 scope.go:117] "RemoveContainer" containerID="b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.544160 4912 scope.go:117] "RemoveContainer" containerID="824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0" Dec 08 22:01:30 crc kubenswrapper[4912]: E1208 22:01:30.544676 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0\": container with ID starting with 824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0 not found: ID does not exist" containerID="824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.544756 
4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0"} err="failed to get container status \"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0\": rpc error: code = NotFound desc = could not find container \"824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0\": container with ID starting with 824be8b817139c9a05ea39940dc1b5f5c7da049d1383cd7734e2d493bce3dca0 not found: ID does not exist" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.544780 4912 scope.go:117] "RemoveContainer" containerID="a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5" Dec 08 22:01:30 crc kubenswrapper[4912]: E1208 22:01:30.545931 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5\": container with ID starting with a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5 not found: ID does not exist" containerID="a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.545998 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5"} err="failed to get container status \"a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5\": rpc error: code = NotFound desc = could not find container \"a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5\": container with ID starting with a8dbad0c34c9e980d26ef810ac3e957690a2c69a700da3aa4e6cded6933bf6f5 not found: ID does not exist" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.546028 4912 scope.go:117] "RemoveContainer" containerID="b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91" Dec 08 22:01:30 crc kubenswrapper[4912]: E1208 22:01:30.546417 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91\": container with ID starting with b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91 not found: ID does not exist" containerID="b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91" Dec 08 22:01:30 crc kubenswrapper[4912]: I1208 22:01:30.546513 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91"} err="failed to get container status \"b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91\": rpc error: code = NotFound desc = could not find container \"b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91\": container with ID starting with b3c51678def32b9b22b9f49f14163a324bd41aae3adcfc5070a440010603ad91 not found: ID does not exist" Dec 08 22:01:32 crc kubenswrapper[4912]: I1208 22:01:32.444851 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" path="/var/lib/kubelet/pods/7f18b039-7e6f-45f6-940d-9c6585bf5052/volumes" Dec 08 22:01:38 crc kubenswrapper[4912]: I1208 22:01:38.434856 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:01:38 crc kubenswrapper[4912]: E1208 22:01:38.435540 4912 pod_workers.go:1301] "Error syncing pod, skipping" 
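The NotFound errors above are benign: the containers were already removed by an earlier pass, and a second DeleteContainer for the same ID can only fail the status lookup. Container removal is idempotent, so "already gone" counts as success. A minimal sketch of that tolerance, with a stdlib sentinel standing in for the CRI's gRPC NotFound status; the names are illustrative, not the kubelet's:

```go
package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the "rpc error: code = NotFound" seen in the log.
var errNotFound = errors.New("container not found")

type removeFunc func(id string) error

// removeContainer treats NotFound as success: a container that no longer
// exists is already in the state the deletion was trying to reach.
func removeContainer(rm removeFunc, id string) error {
	if err := rm(id); err != nil && !errors.Is(err, errNotFound) {
		return err
	}
	return nil
}

func main() {
	gone := func(id string) error {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	fmt.Println(removeContainer(gone, "824be8b8")) // <nil>
}
```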
err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:01:42 crc kubenswrapper[4912]: I1208 22:01:42.428454 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:01:42 crc kubenswrapper[4912]: E1208 22:01:42.429350 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:01:50 crc kubenswrapper[4912]: I1208 22:01:50.428193 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:01:50 crc kubenswrapper[4912]: E1208 22:01:50.428855 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:01:54 crc kubenswrapper[4912]: I1208 22:01:54.427824 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:01:54 crc kubenswrapper[4912]: E1208 22:01:54.428709 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:02:02 crc kubenswrapper[4912]: I1208 22:02:02.427977 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:02:02 crc kubenswrapper[4912]: E1208 22:02:02.428956 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:02:08 crc kubenswrapper[4912]: I1208 22:02:08.433518 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:02:08 crc kubenswrapper[4912]: E1208 22:02:08.434310 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:02:14 crc kubenswrapper[4912]: I1208 22:02:14.428525 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:02:14 crc kubenswrapper[4912]: E1208 22:02:14.429340 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:02:22 crc kubenswrapper[4912]: I1208 22:02:22.428967 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:02:22 crc kubenswrapper[4912]: E1208 22:02:22.429772 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:02:28 crc kubenswrapper[4912]: I1208 22:02:28.433717 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:02:28 crc kubenswrapper[4912]: E1208 22:02:28.434535 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:02:36 crc kubenswrapper[4912]: I1208 22:02:36.428066 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:02:36 crc kubenswrapper[4912]: E1208 22:02:36.428862 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:02:42 crc kubenswrapper[4912]: I1208 22:02:42.428385 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:02:42 crc kubenswrapper[4912]: E1208 22:02:42.429128 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" 
podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:02:48 crc kubenswrapper[4912]: I1208 22:02:48.435510 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:02:48 crc kubenswrapper[4912]: E1208 22:02:48.436100 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:02:55 crc kubenswrapper[4912]: I1208 22:02:55.428505 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:02:55 crc kubenswrapper[4912]: E1208 22:02:55.430717 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:03:03 crc kubenswrapper[4912]: I1208 22:03:03.428563 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:03:03 crc kubenswrapper[4912]: E1208 22:03:03.429411 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:03:06 crc kubenswrapper[4912]: I1208 22:03:06.427800 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:03:06 crc kubenswrapper[4912]: E1208 22:03:06.428589 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:03:16 crc kubenswrapper[4912]: I1208 22:03:16.428392 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:03:16 crc kubenswrapper[4912]: E1208 22:03:16.429253 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:03:20 crc kubenswrapper[4912]: I1208 22:03:20.428287 4912 scope.go:117] "RemoveContainer" 
containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:03:20 crc kubenswrapper[4912]: E1208 22:03:20.429223 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:03:29 crc kubenswrapper[4912]: I1208 22:03:29.428600 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:03:29 crc kubenswrapper[4912]: E1208 22:03:29.429402 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:03:32 crc kubenswrapper[4912]: I1208 22:03:32.428259 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:03:32 crc kubenswrapper[4912]: E1208 22:03:32.428810 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:03:44 crc kubenswrapper[4912]: I1208 22:03:44.428678 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:03:45 crc kubenswrapper[4912]: I1208 22:03:45.597355 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3"} Dec 08 22:03:46 crc kubenswrapper[4912]: I1208 22:03:46.428479 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:03:46 crc kubenswrapper[4912]: E1208 22:03:46.429332 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:03:58 crc kubenswrapper[4912]: I1208 22:03:58.435240 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:03:58 crc kubenswrapper[4912]: E1208 22:03:58.436371 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
Dec 08 22:03:58 crc kubenswrapper[4912]: E1208 22:03:58.436371 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.173613 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"]
Dec 08 22:04:06 crc kubenswrapper[4912]: E1208 22:04:06.174638 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="registry-server"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.174657 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="registry-server"
Dec 08 22:04:06 crc kubenswrapper[4912]: E1208 22:04:06.174677 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="extract-utilities"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.174685 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="extract-utilities"
Dec 08 22:04:06 crc kubenswrapper[4912]: E1208 22:04:06.174711 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="extract-content"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.174720 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="extract-content"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.174967 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f18b039-7e6f-45f6-940d-9c6585bf5052" containerName="registry-server"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.177990 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.186915 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"]
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.285687 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jshcv\" (UniqueName: \"kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.285759 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.286160 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.388219 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.388326 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jshcv\" (UniqueName: \"kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.388367 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.388742 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.388764 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.410112 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jshcv\" (UniqueName: \"kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv\") pod \"redhat-operators-sd75d\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:06 crc kubenswrapper[4912]: I1208 22:04:06.501952 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:07 crc kubenswrapper[4912]: I1208 22:04:07.000558 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"]
Dec 08 22:04:07 crc kubenswrapper[4912]: W1208 22:04:07.003237 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb08b52de_e733_436f_ab3a_6bce652e2394.slice/crio-1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d WatchSource:0}: Error finding container 1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d: Status 404 returned error can't find the container with id 1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d
Dec 08 22:04:07 crc kubenswrapper[4912]: I1208 22:04:07.804827 4912 generic.go:334] "Generic (PLEG): container finished" podID="b08b52de-e733-436f-ab3a-6bce652e2394" containerID="b8b3128f933dae680b66aa78311409367fd19c8229b5f7090280226e9cb16cf7" exitCode=0
Dec 08 22:04:07 crc kubenswrapper[4912]: I1208 22:04:07.804884 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerDied","Data":"b8b3128f933dae680b66aa78311409367fd19c8229b5f7090280226e9cb16cf7"}
Dec 08 22:04:07 crc kubenswrapper[4912]: I1208 22:04:07.806648 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerStarted","Data":"1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d"}
Dec 08 22:04:08 crc kubenswrapper[4912]: I1208 22:04:08.816783 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerStarted","Data":"37c3d5c3a0cd87f52652dbcab122a5d021c61d483a05387ddf23479c07a92dc7"}
Dec 08 22:04:11 crc kubenswrapper[4912]: I1208 22:04:11.843411 4912 generic.go:334] "Generic (PLEG): container finished" podID="b08b52de-e733-436f-ab3a-6bce652e2394" containerID="37c3d5c3a0cd87f52652dbcab122a5d021c61d483a05387ddf23479c07a92dc7" exitCode=0
Dec 08 22:04:11 crc kubenswrapper[4912]: I1208 22:04:11.843453 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerDied","Data":"37c3d5c3a0cd87f52652dbcab122a5d021c61d483a05387ddf23479c07a92dc7"}
Dec 08 22:04:12 crc kubenswrapper[4912]: I1208 22:04:12.428084 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573"
Dec 08 22:04:12 crc kubenswrapper[4912]: E1208 22:04:12.428720 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:04:13 crc kubenswrapper[4912]: I1208 22:04:13.873238 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerStarted","Data":"cbe35671e4c4b1f026b5fc860cf1da52614e0412606a97dc77e80d6ac6412f32"}
Dec 08 22:04:13 crc kubenswrapper[4912]: I1208 22:04:13.899964 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sd75d" podStartSLOduration=3.057962437 podStartE2EDuration="7.899933448s" podCreationTimestamp="2025-12-08 22:04:06 +0000 UTC" firstStartedPulling="2025-12-08 22:04:07.806646236 +0000 UTC m=+2729.669648319" lastFinishedPulling="2025-12-08 22:04:12.648617257 +0000 UTC m=+2734.511619330" observedRunningTime="2025-12-08 22:04:13.895902717 +0000 UTC m=+2735.758904800" watchObservedRunningTime="2025-12-08 22:04:13.899933448 +0000 UTC m=+2735.762935531"
Dec 08 22:04:16 crc kubenswrapper[4912]: I1208 22:04:16.503161 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:16 crc kubenswrapper[4912]: I1208 22:04:16.504384 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:17 crc kubenswrapper[4912]: I1208 22:04:17.546712 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sd75d" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="registry-server" probeResult="failure" output=<
Dec 08 22:04:17 crc kubenswrapper[4912]: 	timeout: failed to connect service ":50051" within 1s
Dec 08 22:04:17 crc kubenswrapper[4912]: >
Dec 08 22:04:24 crc kubenswrapper[4912]: I1208 22:04:24.428326 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573"
Dec 08 22:04:24 crc kubenswrapper[4912]: E1208 22:04:24.429155 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:04:26 crc kubenswrapper[4912]: I1208 22:04:26.544894 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:26 crc kubenswrapper[4912]: I1208 22:04:26.613758 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sd75d"
Dec 08 22:04:26 crc kubenswrapper[4912]: I1208 22:04:26.777265 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"]
Dec 08 22:04:27 crc kubenswrapper[4912]: I1208 22:04:27.984919 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sd75d" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="registry-server" containerID="cri-o://cbe35671e4c4b1f026b5fc860cf1da52614e0412606a97dc77e80d6ac6412f32" gracePeriod=2
container finished" podID="b08b52de-e733-436f-ab3a-6bce652e2394" containerID="cbe35671e4c4b1f026b5fc860cf1da52614e0412606a97dc77e80d6ac6412f32" exitCode=0 Dec 08 22:04:28 crc kubenswrapper[4912]: I1208 22:04:28.995975 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerDied","Data":"cbe35671e4c4b1f026b5fc860cf1da52614e0412606a97dc77e80d6ac6412f32"} Dec 08 22:04:28 crc kubenswrapper[4912]: I1208 22:04:28.996428 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sd75d" event={"ID":"b08b52de-e733-436f-ab3a-6bce652e2394","Type":"ContainerDied","Data":"1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d"} Dec 08 22:04:28 crc kubenswrapper[4912]: I1208 22:04:28.996450 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bd54c97f1645749601ecc4c45af6e7ed7a29ed3a4c4afd59327554c65c7742d" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.032724 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sd75d" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.154645 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities\") pod \"b08b52de-e733-436f-ab3a-6bce652e2394\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.154685 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content\") pod \"b08b52de-e733-436f-ab3a-6bce652e2394\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.154722 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jshcv\" (UniqueName: \"kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv\") pod \"b08b52de-e733-436f-ab3a-6bce652e2394\" (UID: \"b08b52de-e733-436f-ab3a-6bce652e2394\") " Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.158938 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities" (OuterVolumeSpecName: "utilities") pod "b08b52de-e733-436f-ab3a-6bce652e2394" (UID: "b08b52de-e733-436f-ab3a-6bce652e2394"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.160133 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv" (OuterVolumeSpecName: "kube-api-access-jshcv") pod "b08b52de-e733-436f-ab3a-6bce652e2394" (UID: "b08b52de-e733-436f-ab3a-6bce652e2394"). InnerVolumeSpecName "kube-api-access-jshcv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.257247 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.257589 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jshcv\" (UniqueName: \"kubernetes.io/projected/b08b52de-e733-436f-ab3a-6bce652e2394-kube-api-access-jshcv\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.265594 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b08b52de-e733-436f-ab3a-6bce652e2394" (UID: "b08b52de-e733-436f-ab3a-6bce652e2394"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:04:29 crc kubenswrapper[4912]: I1208 22:04:29.370069 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08b52de-e733-436f-ab3a-6bce652e2394-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:04:30 crc kubenswrapper[4912]: I1208 22:04:30.011768 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sd75d" Dec 08 22:04:30 crc kubenswrapper[4912]: I1208 22:04:30.060483 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"] Dec 08 22:04:30 crc kubenswrapper[4912]: I1208 22:04:30.069119 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sd75d"] Dec 08 22:04:30 crc kubenswrapper[4912]: I1208 22:04:30.456224 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" path="/var/lib/kubelet/pods/b08b52de-e733-436f-ab3a-6bce652e2394/volumes" Dec 08 22:04:37 crc kubenswrapper[4912]: I1208 22:04:37.427591 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:04:37 crc kubenswrapper[4912]: E1208 22:04:37.428511 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:04:49 crc kubenswrapper[4912]: I1208 22:04:49.428643 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:04:49 crc kubenswrapper[4912]: E1208 22:04:49.430788 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:02 crc kubenswrapper[4912]: I1208 22:05:02.428763 4912 scope.go:117] "RemoveContainer" 
containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:05:02 crc kubenswrapper[4912]: E1208 22:05:02.429563 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:16 crc kubenswrapper[4912]: I1208 22:05:16.428313 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:05:16 crc kubenswrapper[4912]: E1208 22:05:16.429154 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:30 crc kubenswrapper[4912]: I1208 22:05:30.428212 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:05:30 crc kubenswrapper[4912]: E1208 22:05:30.428920 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:41 crc kubenswrapper[4912]: I1208 22:05:41.428752 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:05:41 crc kubenswrapper[4912]: E1208 22:05:41.429714 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.657598 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:05:50 crc kubenswrapper[4912]: E1208 22:05:50.658604 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="extract-content" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.658630 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="extract-content" Dec 08 22:05:50 crc kubenswrapper[4912]: E1208 22:05:50.658660 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="registry-server" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.658670 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="registry-server" Dec 08 
22:05:50 crc kubenswrapper[4912]: E1208 22:05:50.658703 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="extract-utilities" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.658714 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="extract-utilities" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.658943 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="b08b52de-e733-436f-ab3a-6bce652e2394" containerName="registry-server" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.660477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.667832 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.810008 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcw86\" (UniqueName: \"kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.810181 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.810547 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.912518 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcw86\" (UniqueName: \"kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.912589 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.912701 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.913466 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.913482 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.943810 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcw86\" (UniqueName: \"kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86\") pod \"redhat-marketplace-nwzs7\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:50 crc kubenswrapper[4912]: I1208 22:05:50.983862 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:05:51 crc kubenswrapper[4912]: I1208 22:05:51.616055 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:05:51 crc kubenswrapper[4912]: W1208 22:05:51.619819 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb5b65a7_07d2_4ce7_9ec6_eb28dfd2d413.slice/crio-b419216816b889a8be757f14d45b626f793c88f075b1a6e33a0bb8e462ac6472 WatchSource:0}: Error finding container b419216816b889a8be757f14d45b626f793c88f075b1a6e33a0bb8e462ac6472: Status 404 returned error can't find the container with id b419216816b889a8be757f14d45b626f793c88f075b1a6e33a0bb8e462ac6472 Dec 08 22:05:51 crc kubenswrapper[4912]: I1208 22:05:51.744852 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerStarted","Data":"b419216816b889a8be757f14d45b626f793c88f075b1a6e33a0bb8e462ac6472"} Dec 08 22:05:52 crc kubenswrapper[4912]: I1208 22:05:52.755976 4912 generic.go:334] "Generic (PLEG): container finished" podID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerID="7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302" exitCode=0 Dec 08 22:05:52 crc kubenswrapper[4912]: I1208 22:05:52.756076 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerDied","Data":"7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302"} Dec 08 22:05:53 crc kubenswrapper[4912]: I1208 22:05:53.428816 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:05:53 crc kubenswrapper[4912]: E1208 22:05:53.429301 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:05:53 crc kubenswrapper[4912]: I1208 22:05:53.766929 4912 generic.go:334] 
"Generic (PLEG): container finished" podID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerID="04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f" exitCode=0 Dec 08 22:05:53 crc kubenswrapper[4912]: I1208 22:05:53.766982 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerDied","Data":"04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f"} Dec 08 22:05:54 crc kubenswrapper[4912]: I1208 22:05:54.777582 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerStarted","Data":"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7"} Dec 08 22:05:54 crc kubenswrapper[4912]: I1208 22:05:54.804447 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nwzs7" podStartSLOduration=3.399669568 podStartE2EDuration="4.804409176s" podCreationTimestamp="2025-12-08 22:05:50 +0000 UTC" firstStartedPulling="2025-12-08 22:05:52.759504924 +0000 UTC m=+2834.622507007" lastFinishedPulling="2025-12-08 22:05:54.164244532 +0000 UTC m=+2836.027246615" observedRunningTime="2025-12-08 22:05:54.794800056 +0000 UTC m=+2836.657802139" watchObservedRunningTime="2025-12-08 22:05:54.804409176 +0000 UTC m=+2836.667411259" Dec 08 22:06:00 crc kubenswrapper[4912]: I1208 22:06:00.984210 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:00 crc kubenswrapper[4912]: I1208 22:06:00.984758 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:01 crc kubenswrapper[4912]: I1208 22:06:01.032573 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:01 crc kubenswrapper[4912]: I1208 22:06:01.951172 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:02 crc kubenswrapper[4912]: I1208 22:06:02.059456 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:06:02 crc kubenswrapper[4912]: I1208 22:06:02.965470 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:06:02 crc kubenswrapper[4912]: I1208 22:06:02.965802 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:06:03 crc kubenswrapper[4912]: I1208 22:06:03.860421 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nwzs7" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="registry-server" containerID="cri-o://350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7" gracePeriod=2 Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 
22:06:04.319636 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.484956 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcw86\" (UniqueName: \"kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86\") pod \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.485265 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities\") pod \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.485397 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content\") pod \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\" (UID: \"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413\") " Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.486239 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities" (OuterVolumeSpecName: "utilities") pod "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" (UID: "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.491277 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86" (OuterVolumeSpecName: "kube-api-access-kcw86") pod "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" (UID: "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413"). InnerVolumeSpecName "kube-api-access-kcw86". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.515184 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" (UID: "fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.589661 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcw86\" (UniqueName: \"kubernetes.io/projected/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-kube-api-access-kcw86\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.589700 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.589710 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.874350 4912 generic.go:334] "Generic (PLEG): container finished" podID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerID="350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7" exitCode=0 Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.874407 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerDied","Data":"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7"} Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.874444 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwzs7" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.874468 4912 scope.go:117] "RemoveContainer" containerID="350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.874452 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwzs7" event={"ID":"fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413","Type":"ContainerDied","Data":"b419216816b889a8be757f14d45b626f793c88f075b1a6e33a0bb8e462ac6472"} Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.899339 4912 scope.go:117] "RemoveContainer" containerID="04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.917851 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.926014 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwzs7"] Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.935080 4912 scope.go:117] "RemoveContainer" containerID="7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.966778 4912 scope.go:117] "RemoveContainer" containerID="350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7" Dec 08 22:06:04 crc kubenswrapper[4912]: E1208 22:06:04.967542 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7\": container with ID starting with 350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7 not found: ID does not exist" containerID="350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.967604 4912 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7"} err="failed to get container status \"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7\": rpc error: code = NotFound desc = could not find container \"350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7\": container with ID starting with 350a37ea5f615d5e7335aea4f812e2aa9b8fc5e4166732497d0b20375f278af7 not found: ID does not exist" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.967640 4912 scope.go:117] "RemoveContainer" containerID="04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f" Dec 08 22:06:04 crc kubenswrapper[4912]: E1208 22:06:04.968016 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f\": container with ID starting with 04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f not found: ID does not exist" containerID="04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.968073 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f"} err="failed to get container status \"04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f\": rpc error: code = NotFound desc = could not find container \"04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f\": container with ID starting with 04fd9ddc5e2fbbe57eb5aff3341ba271f9bae339490732bb6938400f35b4d89f not found: ID does not exist" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.968120 4912 scope.go:117] "RemoveContainer" containerID="7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302" Dec 08 22:06:04 crc kubenswrapper[4912]: E1208 22:06:04.968497 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302\": container with ID starting with 7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302 not found: ID does not exist" containerID="7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302" Dec 08 22:06:04 crc kubenswrapper[4912]: I1208 22:06:04.968565 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302"} err="failed to get container status \"7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302\": rpc error: code = NotFound desc = could not find container \"7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302\": container with ID starting with 7abfbda11663e2aa3cf0938be802953b4c7b0aae149e28b9dc579d606a51d302 not found: ID does not exist" Dec 08 22:06:06 crc kubenswrapper[4912]: I1208 22:06:06.439282 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" path="/var/lib/kubelet/pods/fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413/volumes" Dec 08 22:06:07 crc kubenswrapper[4912]: I1208 22:06:07.427556 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:06:07 crc kubenswrapper[4912]: E1208 22:06:07.428028 4912 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:06:22 crc kubenswrapper[4912]: I1208 22:06:22.427744 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:06:23 crc kubenswrapper[4912]: I1208 22:06:23.023538 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"} Dec 08 22:06:23 crc kubenswrapper[4912]: I1208 22:06:23.024382 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.194635 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fvbph"] Dec 08 22:06:25 crc kubenswrapper[4912]: E1208 22:06:25.195025 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="extract-content" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.195056 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="extract-content" Dec 08 22:06:25 crc kubenswrapper[4912]: E1208 22:06:25.195091 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="extract-utilities" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.195098 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="extract-utilities" Dec 08 22:06:25 crc kubenswrapper[4912]: E1208 22:06:25.195116 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="registry-server" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.195122 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="registry-server" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.195314 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb5b65a7-07d2-4ce7-9ec6-eb28dfd2d413" containerName="registry-server" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.196597 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.210074 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvbph"] Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.273203 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngbsz\" (UniqueName: \"kubernetes.io/projected/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-kube-api-access-ngbsz\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.273395 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-catalog-content\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.273613 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-utilities\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.374755 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngbsz\" (UniqueName: \"kubernetes.io/projected/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-kube-api-access-ngbsz\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.374834 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-catalog-content\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.374889 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-utilities\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.375382 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-utilities\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.375532 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-catalog-content\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.393300 4912 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ngbsz\" (UniqueName: \"kubernetes.io/projected/518c2aa9-b6d8-49c4-9e4c-f6731eef84f2-kube-api-access-ngbsz\") pod \"certified-operators-fvbph\" (UID: \"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2\") " pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.525532 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:25 crc kubenswrapper[4912]: I1208 22:06:25.988342 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvbph"] Dec 08 22:06:25 crc kubenswrapper[4912]: W1208 22:06:25.999598 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod518c2aa9_b6d8_49c4_9e4c_f6731eef84f2.slice/crio-a542d063b23a382f7ba89415522ff98afbb7ed0d83cd1abcb0c8c5b03d0e4de4 WatchSource:0}: Error finding container a542d063b23a382f7ba89415522ff98afbb7ed0d83cd1abcb0c8c5b03d0e4de4: Status 404 returned error can't find the container with id a542d063b23a382f7ba89415522ff98afbb7ed0d83cd1abcb0c8c5b03d0e4de4 Dec 08 22:06:26 crc kubenswrapper[4912]: I1208 22:06:26.054020 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvbph" event={"ID":"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2","Type":"ContainerStarted","Data":"a542d063b23a382f7ba89415522ff98afbb7ed0d83cd1abcb0c8c5b03d0e4de4"} Dec 08 22:06:27 crc kubenswrapper[4912]: I1208 22:06:27.066905 4912 generic.go:334] "Generic (PLEG): container finished" podID="518c2aa9-b6d8-49c4-9e4c-f6731eef84f2" containerID="e7d82410d62072c7d08fe5b3dcd0010fa970c69e50e88c4f7e6874e6cf667ac0" exitCode=0 Dec 08 22:06:27 crc kubenswrapper[4912]: I1208 22:06:27.067016 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvbph" event={"ID":"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2","Type":"ContainerDied","Data":"e7d82410d62072c7d08fe5b3dcd0010fa970c69e50e88c4f7e6874e6cf667ac0"} Dec 08 22:06:27 crc kubenswrapper[4912]: I1208 22:06:27.069199 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 22:06:31 crc kubenswrapper[4912]: I1208 22:06:31.101385 4912 generic.go:334] "Generic (PLEG): container finished" podID="518c2aa9-b6d8-49c4-9e4c-f6731eef84f2" containerID="073142cc206400968feb56bd1a599c9fa339e187f178a810ee1a1cb5471f32a5" exitCode=0 Dec 08 22:06:31 crc kubenswrapper[4912]: I1208 22:06:31.101812 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvbph" event={"ID":"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2","Type":"ContainerDied","Data":"073142cc206400968feb56bd1a599c9fa339e187f178a810ee1a1cb5471f32a5"} Dec 08 22:06:32 crc kubenswrapper[4912]: I1208 22:06:32.965194 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:06:32 crc kubenswrapper[4912]: I1208 22:06:32.965750 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 08 22:06:33 crc kubenswrapper[4912]: I1208 22:06:33.121876 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvbph" event={"ID":"518c2aa9-b6d8-49c4-9e4c-f6731eef84f2","Type":"ContainerStarted","Data":"b81420a830bb08f0f31f6489e06e6d4439d548edff0bc71c0a7fd3faed92b587"} Dec 08 22:06:33 crc kubenswrapper[4912]: I1208 22:06:33.142771 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fvbph" podStartSLOduration=3.243136715 podStartE2EDuration="8.142754601s" podCreationTimestamp="2025-12-08 22:06:25 +0000 UTC" firstStartedPulling="2025-12-08 22:06:27.068905707 +0000 UTC m=+2868.931907790" lastFinishedPulling="2025-12-08 22:06:31.968523593 +0000 UTC m=+2873.831525676" observedRunningTime="2025-12-08 22:06:33.139456758 +0000 UTC m=+2875.002458841" watchObservedRunningTime="2025-12-08 22:06:33.142754601 +0000 UTC m=+2875.005756684" Dec 08 22:06:35 crc kubenswrapper[4912]: I1208 22:06:35.251822 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 22:06:35 crc kubenswrapper[4912]: I1208 22:06:35.526543 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:35 crc kubenswrapper[4912]: I1208 22:06:35.526889 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:35 crc kubenswrapper[4912]: I1208 22:06:35.579499 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:45 crc kubenswrapper[4912]: I1208 22:06:45.570349 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fvbph" Dec 08 22:06:45 crc kubenswrapper[4912]: I1208 22:06:45.641254 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvbph"] Dec 08 22:06:45 crc kubenswrapper[4912]: I1208 22:06:45.695342 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 22:06:45 crc kubenswrapper[4912]: I1208 22:06:45.695609 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rmqkp" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="registry-server" containerID="cri-o://5b5d2aefb793e33af53408b6effe29c03e37c00311e7d368a0382abc31079962" gracePeriod=2 Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.253608 4912 generic.go:334] "Generic (PLEG): container finished" podID="744551ec-9627-4b91-a16a-ed982eaacdae" containerID="5b5d2aefb793e33af53408b6effe29c03e37c00311e7d368a0382abc31079962" exitCode=0 Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.253804 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerDied","Data":"5b5d2aefb793e33af53408b6effe29c03e37c00311e7d368a0382abc31079962"} Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.365884 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.549098 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvmwg\" (UniqueName: \"kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg\") pod \"744551ec-9627-4b91-a16a-ed982eaacdae\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.549168 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities\") pod \"744551ec-9627-4b91-a16a-ed982eaacdae\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.549452 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content\") pod \"744551ec-9627-4b91-a16a-ed982eaacdae\" (UID: \"744551ec-9627-4b91-a16a-ed982eaacdae\") " Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.550107 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities" (OuterVolumeSpecName: "utilities") pod "744551ec-9627-4b91-a16a-ed982eaacdae" (UID: "744551ec-9627-4b91-a16a-ed982eaacdae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.550424 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.557462 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg" (OuterVolumeSpecName: "kube-api-access-wvmwg") pod "744551ec-9627-4b91-a16a-ed982eaacdae" (UID: "744551ec-9627-4b91-a16a-ed982eaacdae"). InnerVolumeSpecName "kube-api-access-wvmwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.612233 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "744551ec-9627-4b91-a16a-ed982eaacdae" (UID: "744551ec-9627-4b91-a16a-ed982eaacdae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.652701 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744551ec-9627-4b91-a16a-ed982eaacdae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:46 crc kubenswrapper[4912]: I1208 22:06:46.652752 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvmwg\" (UniqueName: \"kubernetes.io/projected/744551ec-9627-4b91-a16a-ed982eaacdae-kube-api-access-wvmwg\") on node \"crc\" DevicePath \"\"" Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.269948 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmqkp" event={"ID":"744551ec-9627-4b91-a16a-ed982eaacdae","Type":"ContainerDied","Data":"9ad123d422ad5e601d3ac5dbfbf88343ec2af17b1b2107541bf253a031ff78f4"} Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.270065 4912 scope.go:117] "RemoveContainer" containerID="5b5d2aefb793e33af53408b6effe29c03e37c00311e7d368a0382abc31079962" Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.270365 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rmqkp" Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.305632 4912 scope.go:117] "RemoveContainer" containerID="68c8e39157c973cc2bef1c82c1cbfa3236a2d67a972f510fe81c92a48d966f8a" Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.333075 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.350248 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rmqkp"] Dec 08 22:06:47 crc kubenswrapper[4912]: I1208 22:06:47.351222 4912 scope.go:117] "RemoveContainer" containerID="5d0741c442423d00fdd3e32383975287ad8d8b2e2d1d2effaaa0823ba7355c73" Dec 08 22:06:48 crc kubenswrapper[4912]: I1208 22:06:48.443443 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" path="/var/lib/kubelet/pods/744551ec-9627-4b91-a16a-ed982eaacdae/volumes" Dec 08 22:07:02 crc kubenswrapper[4912]: I1208 22:07:02.965194 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:07:02 crc kubenswrapper[4912]: I1208 22:07:02.965921 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:07:02 crc kubenswrapper[4912]: I1208 22:07:02.965996 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 22:07:02 crc kubenswrapper[4912]: I1208 22:07:02.967264 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3"} 
pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:07:02 crc kubenswrapper[4912]: I1208 22:07:02.967351 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3" gracePeriod=600 Dec 08 22:07:03 crc kubenswrapper[4912]: I1208 22:07:03.488772 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3" exitCode=0 Dec 08 22:07:03 crc kubenswrapper[4912]: I1208 22:07:03.488916 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3"} Dec 08 22:07:03 crc kubenswrapper[4912]: I1208 22:07:03.489140 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"} Dec 08 22:07:03 crc kubenswrapper[4912]: I1208 22:07:03.489171 4912 scope.go:117] "RemoveContainer" containerID="f37a3c3612f0edb6a0beaa66c3ff9b2aeb83fa10034fd1ecd074bfbb62561be7" Dec 08 22:08:58 crc kubenswrapper[4912]: I1208 22:08:58.531015 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" exitCode=1 Dec 08 22:08:58 crc kubenswrapper[4912]: I1208 22:08:58.531303 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"} Dec 08 22:08:58 crc kubenswrapper[4912]: I1208 22:08:58.531699 4912 scope.go:117] "RemoveContainer" containerID="741a4d3cb3bacc5e4da9a4ad4506a873f7f8232bc8df91ae4fe0b4b22fbc3573" Dec 08 22:08:58 crc kubenswrapper[4912]: I1208 22:08:58.532549 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:08:58 crc kubenswrapper[4912]: E1208 22:08:58.532872 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:09:05 crc kubenswrapper[4912]: I1208 22:09:05.249609 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 22:09:05 crc kubenswrapper[4912]: I1208 22:09:05.250274 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" Dec 08 
22:09:05 crc kubenswrapper[4912]: I1208 22:09:05.251174 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:09:05 crc kubenswrapper[4912]: E1208 22:09:05.251486 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:09:19 crc kubenswrapper[4912]: I1208 22:09:19.427892 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:09:19 crc kubenswrapper[4912]: E1208 22:09:19.428712 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:09:30 crc kubenswrapper[4912]: I1208 22:09:30.427896 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:09:30 crc kubenswrapper[4912]: E1208 22:09:30.428691 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:09:32 crc kubenswrapper[4912]: I1208 22:09:32.965452 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:09:32 crc kubenswrapper[4912]: I1208 22:09:32.966055 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:09:45 crc kubenswrapper[4912]: I1208 22:09:45.427650 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:09:45 crc kubenswrapper[4912]: E1208 22:09:45.428513 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:09:58 crc kubenswrapper[4912]: I1208 22:09:58.444118 4912 scope.go:117] "RemoveContainer" 
containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:09:58 crc kubenswrapper[4912]: E1208 22:09:58.444983 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:10:02 crc kubenswrapper[4912]: I1208 22:10:02.965306 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:10:02 crc kubenswrapper[4912]: I1208 22:10:02.965847 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.428467 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:10:10 crc kubenswrapper[4912]: E1208 22:10:10.430668 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.746458 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2jsnr/must-gather-ll99v"] Dec 08 22:10:10 crc kubenswrapper[4912]: E1208 22:10:10.747290 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="registry-server" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.747317 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="registry-server" Dec 08 22:10:10 crc kubenswrapper[4912]: E1208 22:10:10.747342 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="extract-content" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.747350 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="extract-content" Dec 08 22:10:10 crc kubenswrapper[4912]: E1208 22:10:10.747367 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="extract-utilities" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.747375 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="extract-utilities" Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.747632 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="744551ec-9627-4b91-a16a-ed982eaacdae" containerName="registry-server" 
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.749075 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.752405 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2jsnr"/"openshift-service-ca.crt"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.752488 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2jsnr"/"kube-root-ca.crt"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.768613 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2jsnr/must-gather-ll99v"]
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.838672 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.838769 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn6b4\" (UniqueName: \"kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.940649 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.940778 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn6b4\" (UniqueName: \"kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.941178 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:10 crc kubenswrapper[4912]: I1208 22:10:10.969863 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn6b4\" (UniqueName: \"kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4\") pod \"must-gather-ll99v\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") " pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:11 crc kubenswrapper[4912]: I1208 22:10:11.071144 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:10:11 crc kubenswrapper[4912]: I1208 22:10:11.589595 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2jsnr/must-gather-ll99v"]
Dec 08 22:10:12 crc kubenswrapper[4912]: I1208 22:10:12.346396 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/must-gather-ll99v" event={"ID":"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701","Type":"ContainerStarted","Data":"7ba0013a92638027b85aaddd39c349a0a80257ff800a01e6b8ba687d71852114"}
Dec 08 22:10:20 crc kubenswrapper[4912]: I1208 22:10:20.443361 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/must-gather-ll99v" event={"ID":"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701","Type":"ContainerStarted","Data":"57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77"}
Dec 08 22:10:20 crc kubenswrapper[4912]: I1208 22:10:20.443888 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/must-gather-ll99v" event={"ID":"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701","Type":"ContainerStarted","Data":"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"}
Dec 08 22:10:20 crc kubenswrapper[4912]: I1208 22:10:20.463433 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2jsnr/must-gather-ll99v" podStartSLOduration=2.209815461 podStartE2EDuration="10.463385562s" podCreationTimestamp="2025-12-08 22:10:10 +0000 UTC" firstStartedPulling="2025-12-08 22:10:11.593120069 +0000 UTC m=+3093.456122152" lastFinishedPulling="2025-12-08 22:10:19.84669017 +0000 UTC m=+3101.709692253" observedRunningTime="2025-12-08 22:10:20.46166986 +0000 UTC m=+3102.324671943" watchObservedRunningTime="2025-12-08 22:10:20.463385562 +0000 UTC m=+3102.326387645"
Dec 08 22:10:21 crc kubenswrapper[4912]: I1208 22:10:21.428136 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:10:21 crc kubenswrapper[4912]: E1208 22:10:21.428772 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.663758 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-hmztw"]
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.665477 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.668015 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2jsnr"/"default-dockercfg-fc4km"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.786110 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnkzc\" (UniqueName: \"kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.786412 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.888650 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnkzc\" (UniqueName: \"kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.888730 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.889002 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.908745 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnkzc\" (UniqueName: \"kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc\") pod \"crc-debug-hmztw\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:24 crc kubenswrapper[4912]: I1208 22:10:24.985389 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-hmztw"
Dec 08 22:10:25 crc kubenswrapper[4912]: W1208 22:10:25.032677 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5aa4aa90_aa5c_41cc_abc2_2cdad98386d7.slice/crio-03d7c2844c9170deb0478539e5807004f2bfcd2ee29a95755373af1654451232 WatchSource:0}: Error finding container 03d7c2844c9170deb0478539e5807004f2bfcd2ee29a95755373af1654451232: Status 404 returned error can't find the container with id 03d7c2844c9170deb0478539e5807004f2bfcd2ee29a95755373af1654451232
Dec 08 22:10:25 crc kubenswrapper[4912]: I1208 22:10:25.485839 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" event={"ID":"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7","Type":"ContainerStarted","Data":"03d7c2844c9170deb0478539e5807004f2bfcd2ee29a95755373af1654451232"}
Dec 08 22:10:32 crc kubenswrapper[4912]: I1208 22:10:32.965375 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 08 22:10:32 crc kubenswrapper[4912]: I1208 22:10:32.965997 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 08 22:10:32 crc kubenswrapper[4912]: I1208 22:10:32.966079 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4"
Dec 08 22:10:32 crc kubenswrapper[4912]: I1208 22:10:32.966995 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 08 22:10:32 crc kubenswrapper[4912]: I1208 22:10:32.967112 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" gracePeriod=600
Dec 08 22:10:33 crc kubenswrapper[4912]: I1208 22:10:33.561165 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" exitCode=0
Dec 08 22:10:33 crc kubenswrapper[4912]: I1208 22:10:33.561243 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"}
Dec 08 22:10:33 crc kubenswrapper[4912]: I1208 22:10:33.561682 4912 scope.go:117] "RemoveContainer" containerID="62edc3736393a4509d2a8daec91bd6cfd277ae4e59c18698707b365e0211b0d3"
Dec 08 22:10:36 crc kubenswrapper[4912]: I1208 22:10:36.427706 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:10:36 crc kubenswrapper[4912]: E1208 22:10:36.428470 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:10:37 crc kubenswrapper[4912]: E1208 22:10:37.072573 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:10:37 crc kubenswrapper[4912]: I1208 22:10:37.600655 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:10:37 crc kubenswrapper[4912]: E1208 22:10:37.601517 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:10:37 crc kubenswrapper[4912]: I1208 22:10:37.603889 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" event={"ID":"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7","Type":"ContainerStarted","Data":"08723d7734d63d2a0bc2a0e04c5f394a05ce674b95b716f726481c4910c1c90b"} Dec 08 22:10:37 crc kubenswrapper[4912]: I1208 22:10:37.703318 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" podStartSLOduration=1.569625026 podStartE2EDuration="13.703301778s" podCreationTimestamp="2025-12-08 22:10:24 +0000 UTC" firstStartedPulling="2025-12-08 22:10:25.036692148 +0000 UTC m=+3106.899694231" lastFinishedPulling="2025-12-08 22:10:37.1703689 +0000 UTC m=+3119.033370983" observedRunningTime="2025-12-08 22:10:37.701272079 +0000 UTC m=+3119.564274162" watchObservedRunningTime="2025-12-08 22:10:37.703301778 +0000 UTC m=+3119.566303861" Dec 08 22:10:45 crc kubenswrapper[4912]: I1208 22:10:45.438293 4912 scope.go:117] "RemoveContainer" containerID="cbe35671e4c4b1f026b5fc860cf1da52614e0412606a97dc77e80d6ac6412f32" Dec 08 22:10:50 crc kubenswrapper[4912]: I1208 22:10:50.430130 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:10:50 crc kubenswrapper[4912]: E1208 22:10:50.430928 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" 
podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:10:50 crc kubenswrapper[4912]: I1208 22:10:50.874923 4912 scope.go:117] "RemoveContainer" containerID="b8b3128f933dae680b66aa78311409367fd19c8229b5f7090280226e9cb16cf7" Dec 08 22:10:50 crc kubenswrapper[4912]: I1208 22:10:50.899577 4912 scope.go:117] "RemoveContainer" containerID="37c3d5c3a0cd87f52652dbcab122a5d021c61d483a05387ddf23479c07a92dc7" Dec 08 22:10:52 crc kubenswrapper[4912]: I1208 22:10:52.427915 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:10:52 crc kubenswrapper[4912]: E1208 22:10:52.428694 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:10:59 crc kubenswrapper[4912]: I1208 22:10:59.839337 4912 generic.go:334] "Generic (PLEG): container finished" podID="5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" containerID="08723d7734d63d2a0bc2a0e04c5f394a05ce674b95b716f726481c4910c1c90b" exitCode=0 Dec 08 22:10:59 crc kubenswrapper[4912]: I1208 22:10:59.839431 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" event={"ID":"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7","Type":"ContainerDied","Data":"08723d7734d63d2a0bc2a0e04c5f394a05ce674b95b716f726481c4910c1c90b"} Dec 08 22:11:00 crc kubenswrapper[4912]: I1208 22:11:00.962113 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.001967 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-hmztw"] Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.016763 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-hmztw"] Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.074855 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host\") pod \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.074928 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnkzc\" (UniqueName: \"kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc\") pod \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\" (UID: \"5aa4aa90-aa5c-41cc-abc2-2cdad98386d7\") " Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.075029 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host" (OuterVolumeSpecName: "host") pod "5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" (UID: "5aa4aa90-aa5c-41cc-abc2-2cdad98386d7"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.075418 4912 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-host\") on node \"crc\" DevicePath \"\"" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.091326 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc" (OuterVolumeSpecName: "kube-api-access-jnkzc") pod "5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" (UID: "5aa4aa90-aa5c-41cc-abc2-2cdad98386d7"). InnerVolumeSpecName "kube-api-access-jnkzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.177479 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnkzc\" (UniqueName: \"kubernetes.io/projected/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7-kube-api-access-jnkzc\") on node \"crc\" DevicePath \"\"" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.860896 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03d7c2844c9170deb0478539e5807004f2bfcd2ee29a95755373af1654451232" Dec 08 22:11:01 crc kubenswrapper[4912]: I1208 22:11:01.861375 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-hmztw" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.212237 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-8vr2t"] Dec 08 22:11:02 crc kubenswrapper[4912]: E1208 22:11:02.212723 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" containerName="container-00" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.212749 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" containerName="container-00" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.212972 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" containerName="container-00" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.213864 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.215845 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2jsnr"/"default-dockercfg-fc4km" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.400148 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.400206 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfd48\" (UniqueName: \"kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.440187 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aa4aa90-aa5c-41cc-abc2-2cdad98386d7" path="/var/lib/kubelet/pods/5aa4aa90-aa5c-41cc-abc2-2cdad98386d7/volumes" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.502594 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.502659 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfd48\" (UniqueName: \"kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.502781 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.532378 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfd48\" (UniqueName: \"kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48\") pod \"crc-debug-8vr2t\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:02 crc kubenswrapper[4912]: I1208 22:11:02.832322 4912 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.428657 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.429215 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:11:03 crc kubenswrapper[4912]: E1208 22:11:03.429448 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:11:03 crc kubenswrapper[4912]: E1208 22:11:03.429530 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.880719 4912 generic.go:334] "Generic (PLEG): container finished" podID="cdc4101b-a0c9-436e-810e-4acaba9d351d" containerID="539c448763d4b9b70266e629d8332a0c8c3ffcbf72ae91c14878faa1392faa20" exitCode=1 Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.880778 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" event={"ID":"cdc4101b-a0c9-436e-810e-4acaba9d351d","Type":"ContainerDied","Data":"539c448763d4b9b70266e629d8332a0c8c3ffcbf72ae91c14878faa1392faa20"} Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.880808 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" event={"ID":"cdc4101b-a0c9-436e-810e-4acaba9d351d","Type":"ContainerStarted","Data":"75ff9bc65e47c80ba722d41d9ba546407f7a480d1759fb1a4bb674ebb9790e94"} Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.929720 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-8vr2t"] Dec 08 22:11:03 crc kubenswrapper[4912]: I1208 22:11:03.938415 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2jsnr/crc-debug-8vr2t"] Dec 08 22:11:04 crc kubenswrapper[4912]: I1208 22:11:04.981315 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.150023 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host\") pod \"cdc4101b-a0c9-436e-810e-4acaba9d351d\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.150129 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host" (OuterVolumeSpecName: "host") pod "cdc4101b-a0c9-436e-810e-4acaba9d351d" (UID: "cdc4101b-a0c9-436e-810e-4acaba9d351d"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.150639 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfd48\" (UniqueName: \"kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48\") pod \"cdc4101b-a0c9-436e-810e-4acaba9d351d\" (UID: \"cdc4101b-a0c9-436e-810e-4acaba9d351d\") " Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.151342 4912 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdc4101b-a0c9-436e-810e-4acaba9d351d-host\") on node \"crc\" DevicePath \"\"" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.156060 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48" (OuterVolumeSpecName: "kube-api-access-hfd48") pod "cdc4101b-a0c9-436e-810e-4acaba9d351d" (UID: "cdc4101b-a0c9-436e-810e-4acaba9d351d"). InnerVolumeSpecName "kube-api-access-hfd48". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.253409 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfd48\" (UniqueName: \"kubernetes.io/projected/cdc4101b-a0c9-436e-810e-4acaba9d351d-kube-api-access-hfd48\") on node \"crc\" DevicePath \"\"" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.899917 4912 scope.go:117] "RemoveContainer" containerID="539c448763d4b9b70266e629d8332a0c8c3ffcbf72ae91c14878faa1392faa20" Dec 08 22:11:05 crc kubenswrapper[4912]: I1208 22:11:05.900141 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/crc-debug-8vr2t" Dec 08 22:11:06 crc kubenswrapper[4912]: I1208 22:11:06.441070 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdc4101b-a0c9-436e-810e-4acaba9d351d" path="/var/lib/kubelet/pods/cdc4101b-a0c9-436e-810e-4acaba9d351d/volumes" Dec 08 22:11:16 crc kubenswrapper[4912]: I1208 22:11:16.433694 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:11:16 crc kubenswrapper[4912]: E1208 22:11:16.434487 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:11:18 crc kubenswrapper[4912]: I1208 22:11:18.428108 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:11:18 crc kubenswrapper[4912]: E1208 22:11:18.429025 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:11:27 crc kubenswrapper[4912]: I1208 22:11:27.428531 4912 scope.go:117] "RemoveContainer" 
containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:11:27 crc kubenswrapper[4912]: E1208 22:11:27.429343 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.034332 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-696dcdf5fd-4l78s_1f7341d1-a217-4082-9610-1f882c55186d/barbican-api/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.222509 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-696dcdf5fd-4l78s_1f7341d1-a217-4082-9610-1f882c55186d/barbican-api-log/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.317966 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-67f646cf6d-bpxc7_4697809d-6d20-495e-a2bb-9b39edb2a09c/barbican-keystone-listener-log/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.330979 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-67f646cf6d-bpxc7_4697809d-6d20-495e-a2bb-9b39edb2a09c/barbican-keystone-listener/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.516948 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67f588f949-sxst6_7b0929ad-736b-47b7-8868-99450e21af32/barbican-worker/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.520367 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67f588f949-sxst6_7b0929ad-736b-47b7-8868-99450e21af32/barbican-worker-log/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.670421 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_793a5438-7a15-4ff3-b6f1-1f12dbfabe7f/cinder-api/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.717723 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_793a5438-7a15-4ff3-b6f1-1f12dbfabe7f/cinder-api-log/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.770842 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_cb950bdf-804c-48b5-bfc2-2c92c304f143/cinder-scheduler/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.862527 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_cb950bdf-804c-48b5-bfc2-2c92c304f143/probe/0.log" Dec 08 22:11:30 crc kubenswrapper[4912]: I1208 22:11:30.941565 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-2nfps_16e236a9-1cad-40e9-8f00-1f8261e1c96a/init/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.175353 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-2nfps_16e236a9-1cad-40e9-8f00-1f8261e1c96a/init/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.197275 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_994c9f45-076f-4a96-a8a7-b9e15b90893a/glance-httpd/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.214875 4912 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-2nfps_16e236a9-1cad-40e9-8f00-1f8261e1c96a/dnsmasq-dns/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.360993 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_994c9f45-076f-4a96-a8a7-b9e15b90893a/glance-log/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.407495 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ecb7bb8-b114-4de3-ba10-ea9537d3daa4/glance-httpd/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.428108 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:11:31 crc kubenswrapper[4912]: E1208 22:11:31.428617 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.441231 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ecb7bb8-b114-4de3-ba10-ea9537d3daa4/glance-log/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.773668 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29420521-kmpmr_46da716d-9ea1-452b-b59b-48609b5ad68a/keystone-cron/0.log" Dec 08 22:11:31 crc kubenswrapper[4912]: I1208 22:11:31.906569 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-d797fb44f-6g7nm_de0dec9e-1a2d-47c8-9a2e-10aa43d9a01b/keystone-api/0.log" Dec 08 22:11:32 crc kubenswrapper[4912]: I1208 22:11:32.222253 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66784bb489-2dwmz_1e51ce4b-382f-4365-a312-0219b77aab6e/neutron-api/0.log" Dec 08 22:11:32 crc kubenswrapper[4912]: I1208 22:11:32.317793 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66784bb489-2dwmz_1e51ce4b-382f-4365-a312-0219b77aab6e/neutron-httpd/0.log" Dec 08 22:11:32 crc kubenswrapper[4912]: I1208 22:11:32.689243 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3/nova-api-log/0.log" Dec 08 22:11:32 crc kubenswrapper[4912]: I1208 22:11:32.756808 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9adc2e28-5a3b-4cc7-9ea6-c99f2c6a97d3/nova-api-api/0.log" Dec 08 22:11:33 crc kubenswrapper[4912]: I1208 22:11:33.031495 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_2a84a18f-003f-48fb-b522-290957dbb5db/nova-cell0-conductor-conductor/0.log" Dec 08 22:11:33 crc kubenswrapper[4912]: I1208 22:11:33.127023 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_4a8f2fe2-7084-419b-90cc-880b395eea7d/nova-cell1-conductor-conductor/0.log" Dec 08 22:11:33 crc kubenswrapper[4912]: I1208 22:11:33.374156 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_7298f5bf-63e0-446a-b351-bd2dea532216/nova-cell1-novncproxy-novncproxy/0.log" Dec 08 22:11:33 crc kubenswrapper[4912]: I1208 22:11:33.571669 
4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c094983f-9b35-4488-9d83-215a52b906a4/nova-metadata-log/0.log" Dec 08 22:11:33 crc kubenswrapper[4912]: I1208 22:11:33.937925 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f0790114-108f-4b92-915e-807fd4a7e0aa/nova-scheduler-scheduler/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.100607 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_35406345-ffad-4596-b323-22e156a4e481/mysql-bootstrap/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.422803 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c094983f-9b35-4488-9d83-215a52b906a4/nova-metadata-metadata/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.457960 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_35406345-ffad-4596-b323-22e156a4e481/galera/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.532872 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_35406345-ffad-4596-b323-22e156a4e481/mysql-bootstrap/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.703343 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_914d6cd9-3b13-4f31-bed5-aaf5c553cea9/mysql-bootstrap/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.847489 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_914d6cd9-3b13-4f31-bed5-aaf5c553cea9/mysql-bootstrap/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.876785 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_914d6cd9-3b13-4f31-bed5-aaf5c553cea9/galera/0.log" Dec 08 22:11:34 crc kubenswrapper[4912]: I1208 22:11:34.944394 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_6b9f7656-3af5-4d88-a713-1dad51007309/openstackclient/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.087746 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-mbvfl_55fdb9f6-a68c-4e39-9d14-394de4306337/openstack-network-exporter/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.169871 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dx49f_12a9d079-3756-4164-967c-be9bd7758724/ovsdb-server-init/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.365177 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dx49f_12a9d079-3756-4164-967c-be9bd7758724/ovsdb-server-init/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.418008 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dx49f_12a9d079-3756-4164-967c-be9bd7758724/ovsdb-server/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.437572 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dx49f_12a9d079-3756-4164-967c-be9bd7758724/ovs-vswitchd/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.604320 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-vwsgt_0456360f-7543-4af2-ad73-07d0332d3ce2/ovn-controller/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.714907 4912 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-northd-0_1d0808e2-0fb2-45d9-a814-001c0f02f969/openstack-network-exporter/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.757605 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1d0808e2-0fb2-45d9-a814-001c0f02f969/ovn-northd/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.900808 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7a5a0512-8f42-46d4-8806-d410f3270705/openstack-network-exporter/0.log" Dec 08 22:11:35 crc kubenswrapper[4912]: I1208 22:11:35.982739 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7a5a0512-8f42-46d4-8806-d410f3270705/ovsdbserver-nb/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.100541 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_fe2d86c8-7891-4421-b2c3-914ed8948d3c/ovsdbserver-sb/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.168230 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_fe2d86c8-7891-4421-b2c3-914ed8948d3c/openstack-network-exporter/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.191626 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_0a8e59b7-40e5-4c2e-aead-21245661c02e/memcached/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.296661 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-868f84fc6-f7svl_826aae34-f2b3-4cf5-8d59-04a1ba33a2b5/placement-api/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.336158 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-868f84fc6-f7svl_826aae34-f2b3-4cf5-8d59-04a1ba33a2b5/placement-log/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.411752 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f725cb12-94d8-42af-9930-d1d8a17ae9a7/setup-container/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.644892 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f725cb12-94d8-42af-9930-d1d8a17ae9a7/setup-container/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.670854 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_621df657-49db-4768-8ad5-6676531990d4/setup-container/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.695599 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f725cb12-94d8-42af-9930-d1d8a17ae9a7/rabbitmq/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.879543 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_621df657-49db-4768-8ad5-6676531990d4/rabbitmq/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.884428 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_621df657-49db-4768-8ad5-6676531990d4/setup-container/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.959126 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-589fbdcc4f-69fll_20bcf0b1-ac41-4641-8287-cd62c7ab1157/proxy-server/0.log" Dec 08 22:11:36 crc kubenswrapper[4912]: I1208 22:11:36.975769 4912 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-589fbdcc4f-69fll_20bcf0b1-ac41-4641-8287-cd62c7ab1157/proxy-httpd/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.121282 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-twtkt_d2fd975a-48c8-42a1-a81d-869c32e97dc8/swift-ring-rebalance/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.150611 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/account-auditor/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.177931 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/account-reaper/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.292629 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/account-replicator/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.304939 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/account-server/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.352164 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/container-auditor/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.363494 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/container-replicator/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.408745 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/container-server/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.493460 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/container-updater/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.510187 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/object-auditor/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.555751 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/object-expirer/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.572813 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/object-replicator/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.604630 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/object-server/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.713194 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/object-updater/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.723598 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/rsync/0.log" Dec 08 22:11:37 crc kubenswrapper[4912]: I1208 22:11:37.735316 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_928f7959-0e9f-4b2d-bfa2-2d970196f49f/swift-recon-cron/0.log" Dec 08 22:11:38 
Dec 08 22:11:38 crc kubenswrapper[4912]: E1208 22:11:38.780974 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc4101b-a0c9-436e-810e-4acaba9d351d" containerName="container-00"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.780985 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc4101b-a0c9-436e-810e-4acaba9d351d" containerName="container-00"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.781263 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc4101b-a0c9-436e-810e-4acaba9d351d" containerName="container-00"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.782781 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.795007 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-njsg7"]
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.926503 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwrm7\" (UniqueName: \"kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.926852 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:38 crc kubenswrapper[4912]: I1208 22:11:38.926883 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.029006 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwrm7\" (UniqueName: \"kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.029172 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.029224 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.029651 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.029719 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.049795 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwrm7\" (UniqueName: \"kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7\") pod \"community-operators-njsg7\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") " pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.104095 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:39 crc kubenswrapper[4912]: I1208 22:11:39.644193 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-njsg7"]
Dec 08 22:11:40 crc kubenswrapper[4912]: I1208 22:11:40.369510 4912 generic.go:334] "Generic (PLEG): container finished" podID="29c32c60-8406-462b-a9fc-919347270766" containerID="c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481" exitCode=0
Dec 08 22:11:40 crc kubenswrapper[4912]: I1208 22:11:40.369612 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerDied","Data":"c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481"}
Dec 08 22:11:40 crc kubenswrapper[4912]: I1208 22:11:40.369813 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerStarted","Data":"2e27d7c8ff0a72268c9e984303748d95671548a56f5755fec580b3074e1ac1b9"}
Dec 08 22:11:40 crc kubenswrapper[4912]: I1208 22:11:40.371667 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 08 22:11:40 crc kubenswrapper[4912]: I1208 22:11:40.428329 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:11:40 crc kubenswrapper[4912]: E1208 22:11:40.428648 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:11:42 crc kubenswrapper[4912]: I1208 22:11:42.391220 4912 generic.go:334] "Generic (PLEG): container finished" podID="29c32c60-8406-462b-a9fc-919347270766" containerID="77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db" exitCode=0
Dec 08 22:11:42 crc kubenswrapper[4912]: I1208 22:11:42.391264 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerDied","Data":"77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db"}
Dec 08 22:11:43 crc kubenswrapper[4912]: I1208 22:11:43.403542 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerStarted","Data":"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"}
Dec 08 22:11:43 crc kubenswrapper[4912]: I1208 22:11:43.427632 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:11:43 crc kubenswrapper[4912]: E1208 22:11:43.427907 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.104822 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.105410 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.164305 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.183383 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-njsg7" podStartSLOduration=8.71397698 podStartE2EDuration="11.183363012s" podCreationTimestamp="2025-12-08 22:11:38 +0000 UTC" firstStartedPulling="2025-12-08 22:11:40.371432465 +0000 UTC m=+3182.234434548" lastFinishedPulling="2025-12-08 22:11:42.840818497 +0000 UTC m=+3184.703820580" observedRunningTime="2025-12-08 22:11:43.43029583 +0000 UTC m=+3185.293297933" watchObservedRunningTime="2025-12-08 22:11:49.183363012 +0000 UTC m=+3191.046365095"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.517027 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:49 crc kubenswrapper[4912]: I1208 22:11:49.878815 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-njsg7"]
Dec 08 22:11:51 crc kubenswrapper[4912]: I1208 22:11:51.490634 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-njsg7" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="registry-server" containerID="cri-o://fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4" gracePeriod=2
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.427689 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:11:52 crc kubenswrapper[4912]: E1208 22:11:52.428647 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.443026 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.502257 4912 generic.go:334] "Generic (PLEG): container finished" podID="29c32c60-8406-462b-a9fc-919347270766" containerID="fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4" exitCode=0
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.502325 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-njsg7"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.502325 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerDied","Data":"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"}
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.502450 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-njsg7" event={"ID":"29c32c60-8406-462b-a9fc-919347270766","Type":"ContainerDied","Data":"2e27d7c8ff0a72268c9e984303748d95671548a56f5755fec580b3074e1ac1b9"}
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.502474 4912 scope.go:117] "RemoveContainer" containerID="fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.524756 4912 scope.go:117] "RemoveContainer" containerID="77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.551651 4912 scope.go:117] "RemoveContainer" containerID="c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.582664 4912 scope.go:117] "RemoveContainer" containerID="fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"
Dec 08 22:11:52 crc kubenswrapper[4912]: E1208 22:11:52.583107 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4\": container with ID starting with fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4 not found: ID does not exist" containerID="fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.583158 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4"} err="failed to get container status \"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4\": rpc error: code = NotFound desc = could not find container \"fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4\": container with ID starting with fe0cbbaed3027f86c749fee832f3e68a443305a45684ef863d7c3d17f8c355d4 not found: ID does not exist"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.583193 4912 scope.go:117] "RemoveContainer" containerID="77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db"
Dec 08 22:11:52 crc kubenswrapper[4912]: E1208 22:11:52.583485 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db\": container with ID starting with 77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db not found: ID does not exist" containerID="77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.583521 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db"} err="failed to get container status \"77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db\": rpc error: code = NotFound desc = could not find container \"77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db\": container with ID starting with 77ca0ef14bc5bf465654e0cc67e870a734308197674f1cc8ff4b197c159860db not found: ID does not exist"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.583546 4912 scope.go:117] "RemoveContainer" containerID="c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481"
Dec 08 22:11:52 crc kubenswrapper[4912]: E1208 22:11:52.583760 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481\": container with ID starting with c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481 not found: ID does not exist" containerID="c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.583783 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481"} err="failed to get container status \"c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481\": rpc error: code = NotFound desc = could not find container \"c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481\": container with ID starting with c406efe17cfee6e12cebedaaa78e1ed12150e5ad90a1ba5663dce428d5122481 not found: ID does not exist"
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.590624 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwrm7\" (UniqueName: \"kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7\") pod \"29c32c60-8406-462b-a9fc-919347270766\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") "
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.590792 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content\") pod \"29c32c60-8406-462b-a9fc-919347270766\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") "
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.591106 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities\") pod \"29c32c60-8406-462b-a9fc-919347270766\" (UID: \"29c32c60-8406-462b-a9fc-919347270766\") "
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.591882 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities" (OuterVolumeSpecName: "utilities") pod "29c32c60-8406-462b-a9fc-919347270766" (UID: "29c32c60-8406-462b-a9fc-919347270766"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.598490 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7" (OuterVolumeSpecName: "kube-api-access-gwrm7") pod "29c32c60-8406-462b-a9fc-919347270766" (UID: "29c32c60-8406-462b-a9fc-919347270766"). InnerVolumeSpecName "kube-api-access-gwrm7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.692915 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwrm7\" (UniqueName: \"kubernetes.io/projected/29c32c60-8406-462b-a9fc-919347270766-kube-api-access-gwrm7\") on node \"crc\" DevicePath \"\""
Dec 08 22:11:52 crc kubenswrapper[4912]: I1208 22:11:52.693217 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-utilities\") on node \"crc\" DevicePath \"\""
Dec 08 22:11:53 crc kubenswrapper[4912]: I1208 22:11:53.217830 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29c32c60-8406-462b-a9fc-919347270766" (UID: "29c32c60-8406-462b-a9fc-919347270766"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 22:11:53 crc kubenswrapper[4912]: I1208 22:11:53.305961 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c32c60-8406-462b-a9fc-919347270766-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 08 22:11:53 crc kubenswrapper[4912]: I1208 22:11:53.432651 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-njsg7"]
Dec 08 22:11:53 crc kubenswrapper[4912]: I1208 22:11:53.443000 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-njsg7"]
Dec 08 22:11:54 crc kubenswrapper[4912]: I1208 22:11:54.439006 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c32c60-8406-462b-a9fc-919347270766" path="/var/lib/kubelet/pods/29c32c60-8406-462b-a9fc-919347270766/volumes"
Dec 08 22:11:57 crc kubenswrapper[4912]: I1208 22:11:57.427735 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:11:57 crc kubenswrapper[4912]: E1208 22:11:57.428296 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.450586 4912 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/util/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.634273 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/util/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.643398 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/pull/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.673484 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/pull/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.851850 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/util/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.853329 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/extract/0.log" Dec 08 22:11:58 crc kubenswrapper[4912]: I1208 22:11:58.878466 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6d7e8ab3e836a1bc30f99f0123132495e6e8509b878c211e2d7548009blgf8t_ef89f58e-50c9-488e-bed0-bcfaac1e2851/pull/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.020517 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-tsz7z_b4ebbd1e-bbd8-4290-8745-ed80c67abf66/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.066211 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-mpssj_b47b551b-07c5-4fc7-b6a9-76208870148f/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.212298 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-tsz7z_b4ebbd1e-bbd8-4290-8745-ed80c67abf66/manager/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.330016 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-mpssj_b47b551b-07c5-4fc7-b6a9-76208870148f/manager/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.392463 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-plw7z_0bf34a60-0aa4-4408-84f8-7848cf76086f/manager/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.420583 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-plw7z_0bf34a60-0aa4-4408-84f8-7848cf76086f/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.583042 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-54mxl_ce1d46fe-d3fc-4386-a545-3e4513ca68c3/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: 
I1208 22:11:59.641396 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-54mxl_ce1d46fe-d3fc-4386-a545-3e4513ca68c3/manager/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.777006 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dqmmb_b95889d5-9404-4bc5-867a-5bf1492855db/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.793164 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dqmmb_b95889d5-9404-4bc5-867a-5bf1492855db/manager/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.860437 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-r9g4v_0e45e244-50c1-4b0b-8e49-615f31b2cf2c/kube-rbac-proxy/0.log" Dec 08 22:11:59 crc kubenswrapper[4912]: I1208 22:11:59.958679 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-r9g4v_0e45e244-50c1-4b0b-8e49-615f31b2cf2c/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.052065 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-nxm5d_27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c/kube-rbac-proxy/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.307821 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-fmxw5_ef95618f-ec2f-438b-ba4f-15cb5e057b6e/kube-rbac-proxy/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.315687 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-nxm5d_27d2d4c3-9e6c-4c8c-91e9-715559a9bf4c/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.364200 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-fmxw5_ef95618f-ec2f-438b-ba4f-15cb5e057b6e/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.542024 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-hfdwp_a8da65eb-3b52-473d-93c2-da58da0d0cfc/kube-rbac-proxy/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.590285 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-hfdwp_a8da65eb-3b52-473d-93c2-da58da0d0cfc/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.702865 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-xjlfc_dd47e823-cffc-4455-ae03-a29000d733ab/kube-rbac-proxy/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.760818 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-xjlfc_dd47e823-cffc-4455-ae03-a29000d733ab/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.783425 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-wpc67_7967d486-bea2-4064-8fbd-658052c9ac9f/kube-rbac-proxy/0.log" Dec 08 22:12:00 crc 
kubenswrapper[4912]: I1208 22:12:00.931439 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-wpc67_7967d486-bea2-4064-8fbd-658052c9ac9f/manager/0.log" Dec 08 22:12:00 crc kubenswrapper[4912]: I1208 22:12:00.989092 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-4gfv4_d737be23-9586-4023-b01e-a9f7161b3b4c/kube-rbac-proxy/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.043687 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-4gfv4_d737be23-9586-4023-b01e-a9f7161b3b4c/manager/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.298546 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-qsllt_92f66d4c-5b7c-4bc0-820d-3319fa35a16b/kube-rbac-proxy/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.322349 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-qsllt_92f66d4c-5b7c-4bc0-820d-3319fa35a16b/manager/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.435366 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-9xkjf_dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33/kube-rbac-proxy/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.506496 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879f7p4sp_cbe56e26-bee2-4664-abc8-2d7ff76aa32e/kube-rbac-proxy/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.538078 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-9xkjf_dd7ab0b5-66b3-4260-920a-5ca7fc4d7c33/manager/0.log" Dec 08 22:12:01 crc kubenswrapper[4912]: I1208 22:12:01.681417 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879f7p4sp_cbe56e26-bee2-4664-abc8-2d7ff76aa32e/manager/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.147534 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-dg4gd_960d8c65-06be-4c35-8529-f1a8b7440b1d/registry-server/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.407374 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zxmcp_9c67d467-660a-4bbc-a32c-b197db949502/kube-rbac-proxy/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.422369 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7d4449658c-rc2sv_121fb516-7dae-4a50-ac6c-6d0bf7781dce/operator/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.711701 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zxmcp_9c67d467-660a-4bbc-a32c-b197db949502/manager/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.741735 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54c84cffdd-hp5gh_99cab708-e8b8-4a28-8a36-f91964fc84e1/manager/0.log" Dec 08 
22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.748768 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8cqw8_4cfa9728-de47-4dfa-96d9-53b1c591e650/kube-rbac-proxy/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.844275 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8cqw8_4cfa9728-de47-4dfa-96d9-53b1c591e650/manager/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.925353 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qgzdx_243e9fe4-27ef-4bd8-82b4-abd8ea8c8f9b/operator/0.log" Dec 08 22:12:02 crc kubenswrapper[4912]: I1208 22:12:02.990911 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-5s8dd_fb8b700a-18a4-49ce-86cb-a38e2ff4cb58/kube-rbac-proxy/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.082201 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-5s8dd_fb8b700a-18a4-49ce-86cb-a38e2ff4cb58/manager/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.145999 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-xfcfd_17fc5fff-819f-4786-8e6d-9a0d6265e8ce/kube-rbac-proxy/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.210018 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-xfcfd_17fc5fff-819f-4786-8e6d-9a0d6265e8ce/manager/7.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.216072 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65f6d9c768-xfcfd_17fc5fff-819f-4786-8e6d-9a0d6265e8ce/manager/7.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.324078 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sr59v_b17c499c-4624-462b-a672-cd23b4b63301/kube-rbac-proxy/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.381089 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sr59v_b17c499c-4624-462b-a672-cd23b4b63301/manager/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.444414 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-5twmg_e36bfc63-943e-49f5-ab0b-021474292dc7/kube-rbac-proxy/0.log" Dec 08 22:12:03 crc kubenswrapper[4912]: I1208 22:12:03.517243 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-5twmg_e36bfc63-943e-49f5-ab0b-021474292dc7/manager/0.log" Dec 08 22:12:06 crc kubenswrapper[4912]: I1208 22:12:06.428266 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:12:06 crc kubenswrapper[4912]: E1208 22:12:06.428955 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:12:08 crc kubenswrapper[4912]: I1208 22:12:08.428613 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:12:08 crc kubenswrapper[4912]: E1208 22:12:08.430351 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:12:17 crc kubenswrapper[4912]: I1208 22:12:17.428291 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:12:17 crc kubenswrapper[4912]: E1208 22:12:17.430159 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:12:20 crc kubenswrapper[4912]: I1208 22:12:20.428242 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:12:20 crc kubenswrapper[4912]: E1208 22:12:20.429114 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:12:22 crc kubenswrapper[4912]: I1208 22:12:22.069164 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-bx7nc_6ba5e948-9e8e-432b-9973-e0248bc2b82e/control-plane-machine-set-operator/0.log" Dec 08 22:12:22 crc kubenswrapper[4912]: I1208 22:12:22.253822 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d848b_5c9f4edc-e900-4571-8d81-4f253bfe8ea1/kube-rbac-proxy/0.log" Dec 08 22:12:22 crc kubenswrapper[4912]: I1208 22:12:22.297213 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d848b_5c9f4edc-e900-4571-8d81-4f253bfe8ea1/machine-api-operator/0.log" Dec 08 22:12:30 crc kubenswrapper[4912]: I1208 22:12:30.428692 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:12:30 crc kubenswrapper[4912]: E1208 22:12:30.429492 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:12:32 crc kubenswrapper[4912]: I1208 22:12:32.427864 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:12:32 crc kubenswrapper[4912]: E1208 22:12:32.428591 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:12:33 crc kubenswrapper[4912]: I1208 22:12:33.851692 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-wjwt6_68ebf962-3731-4e13-87a4-c34dc485ab22/cert-manager-controller/0.log" Dec 08 22:12:34 crc kubenswrapper[4912]: I1208 22:12:34.036642 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-57j6j_f8d9e945-baf1-49fc-9c67-c1005db9f615/cert-manager-cainjector/0.log" Dec 08 22:12:34 crc kubenswrapper[4912]: I1208 22:12:34.099727 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-btb4l_a7bc3095-bbd0-4351-aca3-8537df3c82a1/cert-manager-webhook/0.log" Dec 08 22:12:43 crc kubenswrapper[4912]: I1208 22:12:43.427649 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:12:43 crc kubenswrapper[4912]: E1208 22:12:43.428511 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:12:44 crc kubenswrapper[4912]: I1208 22:12:44.427800 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:12:44 crc kubenswrapper[4912]: E1208 22:12:44.428176 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.455649 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-h2jzn_4a6801dc-930c-4a3b-9a0d-2455e26011a7/nmstate-console-plugin/0.log" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.695062 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-t75jg_07d5dc75-389c-464e-aba6-8d1a4dd1e736/nmstate-handler/0.log" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.733611 4912 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9gtf4_666ca373-3dfd-4cc5-bcbe-2d2cc8335b14/kube-rbac-proxy/0.log" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.784868 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9gtf4_666ca373-3dfd-4cc5-bcbe-2d2cc8335b14/nmstate-metrics/0.log" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.955399 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-b9w27_031db5a0-79e6-4206-8b1a-200a5862e1d1/nmstate-webhook/0.log" Dec 08 22:12:46 crc kubenswrapper[4912]: I1208 22:12:46.982289 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-29q7z_7dcc5a2b-b055-4bdd-903e-ec9772a7877e/nmstate-operator/0.log" Dec 08 22:12:54 crc kubenswrapper[4912]: I1208 22:12:54.428121 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:12:54 crc kubenswrapper[4912]: E1208 22:12:54.428893 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:12:55 crc kubenswrapper[4912]: I1208 22:12:55.428728 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:12:55 crc kubenswrapper[4912]: E1208 22:12:55.429325 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:13:00 crc kubenswrapper[4912]: I1208 22:13:00.142219 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-74685c6454-9rqb2_d70194be-442d-46e2-84c6-6572c73fb5a8/manager/0.log" Dec 08 22:13:00 crc kubenswrapper[4912]: I1208 22:13:00.209336 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-74685c6454-9rqb2_d70194be-442d-46e2-84c6-6572c73fb5a8/kube-rbac-proxy/0.log" Dec 08 22:13:05 crc kubenswrapper[4912]: I1208 22:13:05.428280 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:13:05 crc kubenswrapper[4912]: E1208 22:13:05.428979 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:13:06 crc kubenswrapper[4912]: I1208 22:13:06.428344 4912 scope.go:117] "RemoveContainer" 
containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:13:06 crc kubenswrapper[4912]: E1208 22:13:06.428627 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:13:12 crc kubenswrapper[4912]: I1208 22:13:12.945234 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-rbzfs_5348f01f-774a-4d17-9a26-ddc251ec89f8/kube-rbac-proxy/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.076677 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-rbzfs_5348f01f-774a-4d17-9a26-ddc251ec89f8/controller/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.188608 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-frr-files/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.324358 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-frr-files/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.355450 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-metrics/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.375303 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-reloader/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.386139 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-reloader/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.585166 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-reloader/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.588655 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-metrics/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.595693 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-metrics/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.625707 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-frr-files/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.803881 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/controller/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.806817 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-frr-files/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.814417 4912 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-metrics/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.820338 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/cp-reloader/0.log" Dec 08 22:13:13 crc kubenswrapper[4912]: I1208 22:13:13.952608 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/frr-metrics/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.009945 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/kube-rbac-proxy/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.083472 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/kube-rbac-proxy-frr/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.154125 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/reloader/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.351513 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-h6ntw_0b46dd74-03d1-4aa9-8bae-24cc229206aa/frr-k8s-webhook-server/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.530161 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-c7c694577-x2mvr_1ae06dae-d5d5-4365-abca-c7a177b0fb56/manager/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.648866 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-84d7d76888-grwms_033dc0ca-3e1a-4c56-a687-bedb4849f5cd/webhook-server/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.822581 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrxk5_60cae323-cb71-49a6-90e4-7ac76e98ec75/kube-rbac-proxy/0.log" Dec 08 22:13:14 crc kubenswrapper[4912]: I1208 22:13:14.836123 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-tt6k7_c03a9518-39ca-436c-84ca-d02a8e6ef6d7/frr/0.log" Dec 08 22:13:15 crc kubenswrapper[4912]: I1208 22:13:15.137692 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrxk5_60cae323-cb71-49a6-90e4-7ac76e98ec75/speaker/0.log" Dec 08 22:13:19 crc kubenswrapper[4912]: I1208 22:13:19.427977 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:13:19 crc kubenswrapper[4912]: I1208 22:13:19.428634 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:13:19 crc kubenswrapper[4912]: E1208 22:13:19.428756 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:13:19 crc kubenswrapper[4912]: E1208 22:13:19.428911 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" Dec 08 22:13:27 crc kubenswrapper[4912]: I1208 22:13:27.475413 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/util/0.log" Dec 08 22:13:27 crc kubenswrapper[4912]: I1208 22:13:27.719959 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/util/0.log" Dec 08 22:13:27 crc kubenswrapper[4912]: I1208 22:13:27.791409 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/pull/0.log" Dec 08 22:13:27 crc kubenswrapper[4912]: I1208 22:13:27.835873 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/pull/0.log" Dec 08 22:13:27 crc kubenswrapper[4912]: I1208 22:13:27.973720 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.024864 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/pull/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.081103 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fs68dg_98eb7224-c5a5-46c2-8b6c-c515e010fb28/extract/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.202754 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.407219 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/pull/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.416876 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.436630 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/pull/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.578455 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/pull/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.582115 4912 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.611802 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210fk98c_a3f1f4ed-5050-40db-9fe0-7979c52368e0/extract/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.774405 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.941549 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/pull/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.947270 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/util/0.log" Dec 08 22:13:28 crc kubenswrapper[4912]: I1208 22:13:28.952425 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/pull/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.109545 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/util/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.141634 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/extract/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.161009 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c14drbl_23c5450e-4908-4bb9-a24f-09b8016d6b0a/pull/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.305492 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/util/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.486263 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/pull/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.505728 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/util/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.525844 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/pull/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.668784 4912 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/util/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.718900 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/pull/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.748836 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839n692_c14041a7-869f-4553-8da9-0ac2c0ca9d7a/extract/0.log" Dec 08 22:13:29 crc kubenswrapper[4912]: I1208 22:13:29.884081 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-utilities/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.039540 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-utilities/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.062665 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-content/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.075496 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-content/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.361539 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-content/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.410726 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/extract-utilities/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.462847 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-fvbph_518c2aa9-b6d8-49c4-9e4c-f6731eef84f2/registry-server/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.559590 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-utilities/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.939678 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-content/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.974485 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-utilities/0.log" Dec 08 22:13:30 crc kubenswrapper[4912]: I1208 22:13:30.986761 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-content/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.192237 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-content/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: 
I1208 22:13:31.197340 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/extract-utilities/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.427335 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb" Dec 08 22:13:31 crc kubenswrapper[4912]: E1208 22:13:31.427581 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.440173 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-zgnwz_fc8cf81c-b57c-4849-9e7c-10b0753855b1/marketplace-operator/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.562474 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-utilities/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.633067 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vjr7b_e29efa51-b798-4e0a-bf88-27affe2b33ab/registry-server/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.768380 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-utilities/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.775325 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-content/0.log" Dec 08 22:13:31 crc kubenswrapper[4912]: I1208 22:13:31.800104 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-content/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.025130 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-content/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.032370 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/extract-utilities/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.101590 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-utilities/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.121355 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2vsmr_f24ef83a-a9e3-4bc2-931c-92673a9e6347/registry-server/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.297169 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-utilities/0.log" Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.310252 4912 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-content/0.log"
Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.350113 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-content/0.log"
Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.488349 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-utilities/0.log"
Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.488675 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/extract-content/0.log"
Dec 08 22:13:32 crc kubenswrapper[4912]: I1208 22:13:32.901531 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpq5d_90e4ab2d-73de-458b-adc0-ffb3b9f50de2/registry-server/0.log"
Dec 08 22:13:34 crc kubenswrapper[4912]: I1208 22:13:34.427419 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:13:34 crc kubenswrapper[4912]: E1208 22:13:34.427641 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:13:45 crc kubenswrapper[4912]: I1208 22:13:45.427751 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:13:45 crc kubenswrapper[4912]: I1208 22:13:45.428365 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:13:45 crc kubenswrapper[4912]: E1208 22:13:45.428542 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:13:45 crc kubenswrapper[4912]: E1208 22:13:45.428603 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:13:45 crc kubenswrapper[4912]: I1208 22:13:45.820529 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-gxbwp_065174a4-9aac-46c1-b83c-71861f156ee3/prometheus-operator/0.log"
Dec 08 22:13:46 crc kubenswrapper[4912]: I1208 22:13:46.046337 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-785d46d47d-h2xz6_7b4799e4-efd5-4f47-b53b-a056d4a3d046/prometheus-operator-admission-webhook/0.log"
Dec 08 22:13:46 crc kubenswrapper[4912]: I1208 22:13:46.054276 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-785d46d47d-wgntc_e9a7a69b-d12e-48e7-899f-2c919d23d906/prometheus-operator-admission-webhook/0.log"
Dec 08 22:13:46 crc kubenswrapper[4912]: I1208 22:13:46.296610 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-rc26h_b9c5dc1e-7823-4b2d-9983-8e23244bb2b9/operator/0.log"
Dec 08 22:13:46 crc kubenswrapper[4912]: I1208 22:13:46.400710 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-7t6lg_e7a6b0fa-1136-439f-a0bd-96f6855dec8f/perses-operator/0.log"
Dec 08 22:13:56 crc kubenswrapper[4912]: I1208 22:13:56.430061 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:13:56 crc kubenswrapper[4912]: I1208 22:13:56.431698 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:13:56 crc kubenswrapper[4912]: E1208 22:13:56.432161 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:13:56 crc kubenswrapper[4912]: E1208 22:13:56.433119 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:13:58 crc kubenswrapper[4912]: I1208 22:13:58.263382 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-74685c6454-9rqb2_d70194be-442d-46e2-84c6-6572c73fb5a8/manager/0.log"
Dec 08 22:13:58 crc kubenswrapper[4912]: I1208 22:13:58.271898 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-74685c6454-9rqb2_d70194be-442d-46e2-84c6-6572c73fb5a8/kube-rbac-proxy/0.log"
Dec 08 22:14:07 crc kubenswrapper[4912]: I1208 22:14:07.427885 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:14:07 crc kubenswrapper[4912]: E1208 22:14:07.429685 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:14:08 crc kubenswrapper[4912]: I1208 22:14:08.434639 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:14:08 crc kubenswrapper[4912]: I1208 22:14:08.847274 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerStarted","Data":"7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"}
Dec 08 22:14:08 crc kubenswrapper[4912]: I1208 22:14:08.847947 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 22:14:15 crc kubenswrapper[4912]: I1208 22:14:15.252754 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 22:14:22 crc kubenswrapper[4912]: I1208 22:14:22.428151 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:14:22 crc kubenswrapper[4912]: E1208 22:14:22.428916 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:14:36 crc kubenswrapper[4912]: I1208 22:14:36.428532 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:14:36 crc kubenswrapper[4912]: E1208 22:14:36.429364 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:14:50 crc kubenswrapper[4912]: I1208 22:14:50.428017 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:14:50 crc kubenswrapper[4912]: E1208 22:14:50.429408 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.166004 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"]
Dec 08 22:15:00 crc kubenswrapper[4912]: E1208 22:15:00.167334 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="registry-server"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.167366 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="registry-server"
Dec 08 22:15:00 crc kubenswrapper[4912]: E1208 22:15:00.167398 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="extract-utilities"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.167406 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="extract-utilities"
Dec 08 22:15:00 crc kubenswrapper[4912]: E1208 22:15:00.167418 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="extract-content"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.167427 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="extract-content"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.167770 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c32c60-8406-462b-a9fc-919347270766" containerName="registry-server"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.168697 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.171378 4912 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.171451 4912 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.178402 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"]
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.330164 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bq6s\" (UniqueName: \"kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.330218 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.330402 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.432457 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.432663 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bq6s\" (UniqueName: \"kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.432732 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.435897 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.438583 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.469013 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bq6s\" (UniqueName: \"kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s\") pod \"collect-profiles-29420535-l57t5\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.501603 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:00 crc kubenswrapper[4912]: I1208 22:15:00.978556 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"]
Dec 08 22:15:01 crc kubenswrapper[4912]: I1208 22:15:01.651697 4912 generic.go:334] "Generic (PLEG): container finished" podID="95956487-e1d6-4e73-b59c-6be45d182e62" containerID="0ecd462ce1bebcdcef0f592261da99449f8d2a7dd64f140318c6d33f8f0bdee2" exitCode=0
Dec 08 22:15:01 crc kubenswrapper[4912]: I1208 22:15:01.652502 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5" event={"ID":"95956487-e1d6-4e73-b59c-6be45d182e62","Type":"ContainerDied","Data":"0ecd462ce1bebcdcef0f592261da99449f8d2a7dd64f140318c6d33f8f0bdee2"}
Dec 08 22:15:01 crc kubenswrapper[4912]: I1208 22:15:01.652544 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5" event={"ID":"95956487-e1d6-4e73-b59c-6be45d182e62","Type":"ContainerStarted","Data":"bcc51edadc599813b232cc09c5c545ab632d8b32d31d3707318ce78f1e4bfbc5"}
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.091611 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.170205 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bq6s\" (UniqueName: \"kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s\") pod \"95956487-e1d6-4e73-b59c-6be45d182e62\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") "
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.170432 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume\") pod \"95956487-e1d6-4e73-b59c-6be45d182e62\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") "
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.170485 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume\") pod \"95956487-e1d6-4e73-b59c-6be45d182e62\" (UID: \"95956487-e1d6-4e73-b59c-6be45d182e62\") "
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.171232 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume" (OuterVolumeSpecName: "config-volume") pod "95956487-e1d6-4e73-b59c-6be45d182e62" (UID: "95956487-e1d6-4e73-b59c-6be45d182e62"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.175826 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "95956487-e1d6-4e73-b59c-6be45d182e62" (UID: "95956487-e1d6-4e73-b59c-6be45d182e62"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.176291 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s" (OuterVolumeSpecName: "kube-api-access-2bq6s") pod "95956487-e1d6-4e73-b59c-6be45d182e62" (UID: "95956487-e1d6-4e73-b59c-6be45d182e62"). InnerVolumeSpecName "kube-api-access-2bq6s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.273051 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bq6s\" (UniqueName: \"kubernetes.io/projected/95956487-e1d6-4e73-b59c-6be45d182e62-kube-api-access-2bq6s\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.273087 4912 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/95956487-e1d6-4e73-b59c-6be45d182e62-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.273096 4912 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95956487-e1d6-4e73-b59c-6be45d182e62-config-volume\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.677136 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5" event={"ID":"95956487-e1d6-4e73-b59c-6be45d182e62","Type":"ContainerDied","Data":"bcc51edadc599813b232cc09c5c545ab632d8b32d31d3707318ce78f1e4bfbc5"}
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.677205 4912 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcc51edadc599813b232cc09c5c545ab632d8b32d31d3707318ce78f1e4bfbc5"
Dec 08 22:15:03 crc kubenswrapper[4912]: I1208 22:15:03.677283 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29420535-l57t5"
Dec 08 22:15:04 crc kubenswrapper[4912]: I1208 22:15:04.172206 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz"]
Dec 08 22:15:04 crc kubenswrapper[4912]: I1208 22:15:04.181458 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29420490-sh8gz"]
Dec 08 22:15:04 crc kubenswrapper[4912]: I1208 22:15:04.428057 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:15:04 crc kubenswrapper[4912]: E1208 22:15:04.428346 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:15:04 crc kubenswrapper[4912]: I1208 22:15:04.440086 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71f0bde6-5047-441f-ab47-d77e824847c1" path="/var/lib/kubelet/pods/71f0bde6-5047-441f-ab47-d77e824847c1/volumes"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.884771 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:11 crc kubenswrapper[4912]: E1208 22:15:11.890099 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95956487-e1d6-4e73-b59c-6be45d182e62" containerName="collect-profiles"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.890143 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="95956487-e1d6-4e73-b59c-6be45d182e62" containerName="collect-profiles"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.890399 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="95956487-e1d6-4e73-b59c-6be45d182e62" containerName="collect-profiles"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.892386 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.897065 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.918612 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2vqp\" (UniqueName: \"kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.918746 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:11 crc kubenswrapper[4912]: I1208 22:15:11.918903 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.020817 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.020979 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2vqp\" (UniqueName: \"kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.021014 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.021489 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.021517 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.040547 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2vqp\" (UniqueName: \"kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp\") pod \"redhat-operators-gtdl2\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") " pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.220511 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:12 crc kubenswrapper[4912]: I1208 22:15:12.784686 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:13 crc kubenswrapper[4912]: I1208 22:15:13.776654 4912 generic.go:334] "Generic (PLEG): container finished" podID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerID="ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4" exitCode=0
Dec 08 22:15:13 crc kubenswrapper[4912]: I1208 22:15:13.776707 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerDied","Data":"ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4"}
Dec 08 22:15:13 crc kubenswrapper[4912]: I1208 22:15:13.776928 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerStarted","Data":"dabc6b40e3d912e29ec42a08096aa18e181cab91f31169be42ecb4dd5b78ecb7"}
Dec 08 22:15:14 crc kubenswrapper[4912]: I1208 22:15:14.787234 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerStarted","Data":"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"}
Dec 08 22:15:19 crc kubenswrapper[4912]: I1208 22:15:19.427785 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:15:19 crc kubenswrapper[4912]: E1208 22:15:19.428502 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:15:19 crc kubenswrapper[4912]: I1208 22:15:19.740072 4912 generic.go:334] "Generic (PLEG): container finished" podID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerID="5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b" exitCode=0
Dec 08 22:15:19 crc kubenswrapper[4912]: I1208 22:15:19.740178 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerDied","Data":"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"}
Dec 08 22:15:20 crc kubenswrapper[4912]: I1208 22:15:20.751676 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerStarted","Data":"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"}
Dec 08 22:15:20 crc kubenswrapper[4912]: I1208 22:15:20.777322 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gtdl2" podStartSLOduration=3.132680657 podStartE2EDuration="9.777277188s" podCreationTimestamp="2025-12-08 22:15:11 +0000 UTC" firstStartedPulling="2025-12-08 22:15:13.778423462 +0000 UTC m=+3395.641425545" lastFinishedPulling="2025-12-08 22:15:20.423019993 +0000 UTC m=+3402.286022076" observedRunningTime="2025-12-08 22:15:20.772208792 +0000 UTC m=+3402.635210875" watchObservedRunningTime="2025-12-08 22:15:20.777277188 +0000 UTC m=+3402.640279271"
Dec 08 22:15:22 crc kubenswrapper[4912]: I1208 22:15:22.221247 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:22 crc kubenswrapper[4912]: I1208 22:15:22.221830 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:23 crc kubenswrapper[4912]: I1208 22:15:23.299602 4912 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gtdl2" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="registry-server" probeResult="failure" output=<
Dec 08 22:15:23 crc kubenswrapper[4912]: timeout: failed to connect service ":50051" within 1s
Dec 08 22:15:23 crc kubenswrapper[4912]: >
Dec 08 22:15:26 crc kubenswrapper[4912]: I1208 22:15:26.803613 4912 generic.go:334] "Generic (PLEG): container finished" podID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerID="cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd" exitCode=0
Dec 08 22:15:26 crc kubenswrapper[4912]: I1208 22:15:26.804234 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2jsnr/must-gather-ll99v" event={"ID":"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701","Type":"ContainerDied","Data":"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"}
Dec 08 22:15:26 crc kubenswrapper[4912]: I1208 22:15:26.805050 4912 scope.go:117] "RemoveContainer" containerID="cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"
Dec 08 22:15:27 crc kubenswrapper[4912]: I1208 22:15:27.538799 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2jsnr_must-gather-ll99v_f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701/gather/0.log"
Dec 08 22:15:30 crc kubenswrapper[4912]: I1208 22:15:30.432879 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:15:30 crc kubenswrapper[4912]: E1208 22:15:30.433358 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-74dp4_openshift-machine-config-operator(831b06bd-095f-439f-a166-088c2d584933)\"" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933"
Dec 08 22:15:32 crc kubenswrapper[4912]: I1208 22:15:32.269250 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:32 crc kubenswrapper[4912]: I1208 22:15:32.321276 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:32 crc kubenswrapper[4912]: I1208 22:15:32.509843 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:33 crc kubenswrapper[4912]: I1208 22:15:33.866061 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gtdl2" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="registry-server" containerID="cri-o://d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4" gracePeriod=2
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.305992 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.320851 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content\") pod \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") "
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.320963 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities\") pod \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") "
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.324178 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2vqp\" (UniqueName: \"kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp\") pod \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\" (UID: \"690b4ff7-b08c-40a4-8741-b5b01df4b3d5\") "
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.325114 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities" (OuterVolumeSpecName: "utilities") pod "690b4ff7-b08c-40a4-8741-b5b01df4b3d5" (UID: "690b4ff7-b08c-40a4-8741-b5b01df4b3d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.325317 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-utilities\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.334517 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp" (OuterVolumeSpecName: "kube-api-access-g2vqp") pod "690b4ff7-b08c-40a4-8741-b5b01df4b3d5" (UID: "690b4ff7-b08c-40a4-8741-b5b01df4b3d5"). InnerVolumeSpecName "kube-api-access-g2vqp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.427047 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2vqp\" (UniqueName: \"kubernetes.io/projected/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-kube-api-access-g2vqp\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.453796 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "690b4ff7-b08c-40a4-8741-b5b01df4b3d5" (UID: "690b4ff7-b08c-40a4-8741-b5b01df4b3d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.529555 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/690b4ff7-b08c-40a4-8741-b5b01df4b3d5-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.876546 4912 generic.go:334] "Generic (PLEG): container finished" podID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerID="d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4" exitCode=0
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.876676 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtdl2"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.877905 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerDied","Data":"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"}
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.878088 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtdl2" event={"ID":"690b4ff7-b08c-40a4-8741-b5b01df4b3d5","Type":"ContainerDied","Data":"dabc6b40e3d912e29ec42a08096aa18e181cab91f31169be42ecb4dd5b78ecb7"}
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.878185 4912 scope.go:117] "RemoveContainer" containerID="d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.916216 4912 scope.go:117] "RemoveContainer" containerID="5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.916774 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.945662 4912 scope.go:117] "RemoveContainer" containerID="ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.948326 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gtdl2"]
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.996796 4912 scope.go:117] "RemoveContainer" containerID="d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"
Dec 08 22:15:34 crc kubenswrapper[4912]: E1208 22:15:34.998304 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4\": container with ID starting with d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4 not found: ID does not exist" containerID="d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.998394 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4"} err="failed to get container status \"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4\": rpc error: code = NotFound desc = could not find container \"d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4\": container with ID starting with d10c41270d0b483b30c541de00c8d936db501b3a53db86c69807c5573747e6d4 not found: ID does not exist"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.998461 4912 scope.go:117] "RemoveContainer" containerID="5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"
Dec 08 22:15:34 crc kubenswrapper[4912]: E1208 22:15:34.998864 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b\": container with ID starting with 5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b not found: ID does not exist" containerID="5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.998898 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b"} err="failed to get container status \"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b\": rpc error: code = NotFound desc = could not find container \"5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b\": container with ID starting with 5914fc16ad2a2dfe119c9a3de8c9c70f2b2f19b32945a1c3d04cfb316211336b not found: ID does not exist"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.998918 4912 scope.go:117] "RemoveContainer" containerID="ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4"
Dec 08 22:15:34 crc kubenswrapper[4912]: E1208 22:15:34.999553 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4\": container with ID starting with ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4 not found: ID does not exist" containerID="ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4"
Dec 08 22:15:34 crc kubenswrapper[4912]: I1208 22:15:34.999583 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4"} err="failed to get container status \"ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4\": rpc error: code = NotFound desc = could not find container \"ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4\": container with ID starting with ab0fd6b9112c72dabf0fc7495b98949c17bacccb022517553b303151454f70e4 not found: ID does not exist"
Dec 08 22:15:35 crc kubenswrapper[4912]: I1208 22:15:35.875207 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2jsnr/must-gather-ll99v"]
Dec 08 22:15:35 crc kubenswrapper[4912]: I1208 22:15:35.875815 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-2jsnr/must-gather-ll99v" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="copy" containerID="cri-o://57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77" gracePeriod=2
Dec 08 22:15:35 crc kubenswrapper[4912]: I1208 22:15:35.887720 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2jsnr/must-gather-ll99v"]
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.438274 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2jsnr_must-gather-ll99v_f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701/copy/0.log"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.439560 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.458573 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" path="/var/lib/kubelet/pods/690b4ff7-b08c-40a4-8741-b5b01df4b3d5/volumes"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.479477 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output\") pod \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") "
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.479551 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn6b4\" (UniqueName: \"kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4\") pod \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\" (UID: \"f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701\") "
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.495835 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4" (OuterVolumeSpecName: "kube-api-access-jn6b4") pod "f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" (UID: "f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701"). InnerVolumeSpecName "kube-api-access-jn6b4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.582101 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn6b4\" (UniqueName: \"kubernetes.io/projected/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-kube-api-access-jn6b4\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.686182 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" (UID: "f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.786552 4912 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.915580 4912 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2jsnr_must-gather-ll99v_f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701/copy/0.log"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.915960 4912 generic.go:334] "Generic (PLEG): container finished" podID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerID="57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77" exitCode=143
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.916045 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2jsnr/must-gather-ll99v"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.916055 4912 scope.go:117] "RemoveContainer" containerID="57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77"
Dec 08 22:15:36 crc kubenswrapper[4912]: I1208 22:15:36.942495 4912 scope.go:117] "RemoveContainer" containerID="cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"
Dec 08 22:15:37 crc kubenswrapper[4912]: I1208 22:15:37.015836 4912 scope.go:117] "RemoveContainer" containerID="57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77"
Dec 08 22:15:37 crc kubenswrapper[4912]: E1208 22:15:37.016285 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77\": container with ID starting with 57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77 not found: ID does not exist" containerID="57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77"
Dec 08 22:15:37 crc kubenswrapper[4912]: I1208 22:15:37.016326 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77"} err="failed to get container status \"57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77\": rpc error: code = NotFound desc = could not find container \"57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77\": container with ID starting with 57fe59624028f48a8a91704acccd6a8c12927addaab75881d25b81db1b8cca77 not found: ID does not exist"
Dec 08 22:15:37 crc kubenswrapper[4912]: I1208 22:15:37.016356 4912 scope.go:117] "RemoveContainer" containerID="cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"
Dec 08 22:15:37 crc kubenswrapper[4912]: E1208 22:15:37.016773 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd\": container with ID starting with cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd not found: ID does not exist" containerID="cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"
Dec 08 22:15:37 crc kubenswrapper[4912]: I1208 22:15:37.016801 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd"} err="failed to get container status \"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd\": rpc error: code = NotFound desc = could not find container \"cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd\": container with ID starting with cc43937a1c58d0950650108dafa821000fd6b4dc2e34556f9b48a572a8e8fabd not found: ID does not exist"
Dec 08 22:15:38 crc kubenswrapper[4912]: I1208 22:15:38.449419 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" path="/var/lib/kubelet/pods/f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701/volumes"
Dec 08 22:15:41 crc kubenswrapper[4912]: I1208 22:15:41.428342 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4"
Dec 08 22:15:41 crc kubenswrapper[4912]: I1208 22:15:41.963013 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"fd774c52fbcb8b2fb577a29895471f0f041ff245f992fca564777fcf5abd19b1"}
Dec 08 22:15:51 crc kubenswrapper[4912]: I1208 22:15:51.132338 4912 scope.go:117] "RemoveContainer" containerID="5825af5c0710d86d981d66a5d877dda8d9a59849de68d05aca05b478c63cbb05"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.669725 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"]
Dec 08 22:16:38 crc kubenswrapper[4912]: E1208 22:16:38.670801 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="extract-utilities"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.670818 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="extract-utilities"
Dec 08 22:16:38 crc kubenswrapper[4912]: E1208 22:16:38.670855 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="gather"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.670863 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="gather"
Dec 08 22:16:38 crc kubenswrapper[4912]: E1208 22:16:38.670887 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="registry-server"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.670896 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="registry-server"
Dec 08 22:16:38 crc kubenswrapper[4912]: E1208 22:16:38.670913 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="extract-content"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.670921 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="extract-content"
Dec 08 22:16:38 crc kubenswrapper[4912]: E1208 22:16:38.670937 4912 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="copy"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.670944 4912 state_mem.go:107] "Deleted CPUSet assignment" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="copy"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.671162 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="copy"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.671191 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="690b4ff7-b08c-40a4-8741-b5b01df4b3d5" containerName="registry-server"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.671204 4912 memory_manager.go:354] "RemoveStaleState removing state" podUID="f068ed0e-18c8-41d9-8c4a-7ebdbc9cd701" containerName="gather"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.672876 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.676405 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgtl8\" (UniqueName: \"kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.676534 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.676816 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.684966 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"]
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.778415 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.778503 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgtl8\" (UniqueName: \"kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.779118 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.779404 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.779651 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:38 crc kubenswrapper[4912]: I1208 22:16:38.797259 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgtl8\" (UniqueName: \"kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8\") pod \"certified-operators-mjjlj\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.010647 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.508565 4912 generic.go:334] "Generic (PLEG): container finished" podID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" exitCode=1
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.509174 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" event={"ID":"17fc5fff-819f-4786-8e6d-9a0d6265e8ce","Type":"ContainerDied","Data":"7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"}
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.509307 4912 scope.go:117] "RemoveContainer" containerID="b2406316f5bb72ce789158e95e01fcc66066af6425f7241e9cc28de30ddd8fcb"
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.510117 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:16:39 crc kubenswrapper[4912]: E1208 22:16:39.510439 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:16:39 crc kubenswrapper[4912]: I1208 22:16:39.637971 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"]
Dec 08 22:16:40 crc kubenswrapper[4912]: I1208 22:16:40.524150 4912 generic.go:334] "Generic (PLEG): container finished" podID="695f0d63-bc8c-45bd-a5c3-1f56498c9366" containerID="34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07" exitCode=0
Dec 08 22:16:40 crc kubenswrapper[4912]: I1208 22:16:40.524393 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerDied","Data":"34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07"}
Dec 08 22:16:40 crc kubenswrapper[4912]: I1208 22:16:40.524476 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerStarted","Data":"6c8d9ad8507a4b2028fc806f5bdc57066b474f0f31e740e9ca8f7c980b0228cf"}
Dec 08 22:16:40 crc kubenswrapper[4912]: I1208 22:16:40.526770 4912 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 08 22:16:41 crc kubenswrapper[4912]: I1208 22:16:41.540285 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerStarted","Data":"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96"}
Dec 08 22:16:42 crc kubenswrapper[4912]: I1208 22:16:42.551410 4912 generic.go:334] "Generic (PLEG): container finished" podID="695f0d63-bc8c-45bd-a5c3-1f56498c9366" containerID="175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96" exitCode=0
Dec 08 22:16:42 crc kubenswrapper[4912]: I1208 22:16:42.551488 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerDied","Data":"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96"}
Dec 08 22:16:43 crc kubenswrapper[4912]: I1208 22:16:43.562163 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerStarted","Data":"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88"}
Dec 08 22:16:43 crc kubenswrapper[4912]: I1208 22:16:43.583757 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mjjlj" podStartSLOduration=3.168363175 podStartE2EDuration="5.583738007s" podCreationTimestamp="2025-12-08 22:16:38 +0000 UTC" firstStartedPulling="2025-12-08 22:16:40.526424656 +0000 UTC m=+3482.389426739" lastFinishedPulling="2025-12-08 22:16:42.941799488 +0000 UTC m=+3484.804801571" observedRunningTime="2025-12-08 22:16:43.579582255 +0000 UTC m=+3485.442584338" watchObservedRunningTime="2025-12-08 22:16:43.583738007 +0000 UTC m=+3485.446740090"
Dec 08 22:16:45 crc kubenswrapper[4912]: I1208 22:16:45.249692 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 22:16:45 crc kubenswrapper[4912]: I1208 22:16:45.250019 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd"
Dec 08 22:16:45 crc kubenswrapper[4912]: I1208 22:16:45.250795 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:16:45 crc kubenswrapper[4912]: E1208 22:16:45.251030 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.041878 4912 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"]
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.044439 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.057217 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"]
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.219488 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.219588 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qchsf\" (UniqueName: \"kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.219639 4912 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.321803 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.321933 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qchsf\" (UniqueName: \"kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.322016 4912 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.322336 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.322698 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.343439 4912 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qchsf\" (UniqueName: \"kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf\") pod \"redhat-marketplace-s7bnt\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.371946 4912 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7bnt"
Dec 08 22:16:46 crc kubenswrapper[4912]: I1208 22:16:46.844755 4912 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"]
Dec 08 22:16:46 crc kubenswrapper[4912]: W1208 22:16:46.846719 4912 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbf2695c_11a9_4fd5_8d47_6dbef58cd41e.slice/crio-c2bb934d9e287a522a452f32f9a42e3d11d48acc0b8ad3e7a0e3253ea5d2849d WatchSource:0}: Error finding container c2bb934d9e287a522a452f32f9a42e3d11d48acc0b8ad3e7a0e3253ea5d2849d: Status 404 returned error can't find the container with id c2bb934d9e287a522a452f32f9a42e3d11d48acc0b8ad3e7a0e3253ea5d2849d
Dec 08 22:16:47 crc kubenswrapper[4912]: I1208 22:16:47.620304 4912 generic.go:334] "Generic (PLEG): container finished" podID="bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" containerID="0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2" exitCode=0
Dec 08 22:16:47 crc kubenswrapper[4912]: I1208 22:16:47.620370 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt" event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerDied","Data":"0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2"}
Dec 08 22:16:47 crc kubenswrapper[4912]: I1208 22:16:47.620660 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt" event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerStarted","Data":"c2bb934d9e287a522a452f32f9a42e3d11d48acc0b8ad3e7a0e3253ea5d2849d"}
Dec 08 22:16:48 crc kubenswrapper[4912]: I1208 22:16:48.631959 4912 generic.go:334] "Generic (PLEG): container finished" podID="bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" containerID="76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca" exitCode=0
Dec 08 22:16:48 crc kubenswrapper[4912]: I1208 22:16:48.632123 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt" event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerDied","Data":"76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca"}
Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.011223 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.011517 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.057745 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mjjlj"
Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.644701 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt"
event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerStarted","Data":"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392"} Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.663792 4912 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s7bnt" podStartSLOduration=2.243361983 podStartE2EDuration="3.663774308s" podCreationTimestamp="2025-12-08 22:16:46 +0000 UTC" firstStartedPulling="2025-12-08 22:16:47.62367439 +0000 UTC m=+3489.486676473" lastFinishedPulling="2025-12-08 22:16:49.044086715 +0000 UTC m=+3490.907088798" observedRunningTime="2025-12-08 22:16:49.662625569 +0000 UTC m=+3491.525627652" watchObservedRunningTime="2025-12-08 22:16:49.663774308 +0000 UTC m=+3491.526776391" Dec 08 22:16:49 crc kubenswrapper[4912]: I1208 22:16:49.699473 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mjjlj" Dec 08 22:16:51 crc kubenswrapper[4912]: I1208 22:16:51.213331 4912 scope.go:117] "RemoveContainer" containerID="08723d7734d63d2a0bc2a0e04c5f394a05ce674b95b716f726481c4910c1c90b" Dec 08 22:16:51 crc kubenswrapper[4912]: I1208 22:16:51.431836 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"] Dec 08 22:16:51 crc kubenswrapper[4912]: I1208 22:16:51.663390 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mjjlj" podUID="695f0d63-bc8c-45bd-a5c3-1f56498c9366" containerName="registry-server" containerID="cri-o://c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88" gracePeriod=2 Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.326915 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjjlj" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.452390 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgtl8\" (UniqueName: \"kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8\") pod \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.452452 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities\") pod \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.452568 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content\") pod \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\" (UID: \"695f0d63-bc8c-45bd-a5c3-1f56498c9366\") " Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.453695 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities" (OuterVolumeSpecName: "utilities") pod "695f0d63-bc8c-45bd-a5c3-1f56498c9366" (UID: "695f0d63-bc8c-45bd-a5c3-1f56498c9366"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.458853 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8" (OuterVolumeSpecName: "kube-api-access-zgtl8") pod "695f0d63-bc8c-45bd-a5c3-1f56498c9366" (UID: "695f0d63-bc8c-45bd-a5c3-1f56498c9366"). InnerVolumeSpecName "kube-api-access-zgtl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.507109 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "695f0d63-bc8c-45bd-a5c3-1f56498c9366" (UID: "695f0d63-bc8c-45bd-a5c3-1f56498c9366"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.555194 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgtl8\" (UniqueName: \"kubernetes.io/projected/695f0d63-bc8c-45bd-a5c3-1f56498c9366-kube-api-access-zgtl8\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.555231 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.555243 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/695f0d63-bc8c-45bd-a5c3-1f56498c9366-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.674506 4912 generic.go:334] "Generic (PLEG): container finished" podID="695f0d63-bc8c-45bd-a5c3-1f56498c9366" containerID="c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88" exitCode=0 Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.674546 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerDied","Data":"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88"} Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.674867 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjjlj" event={"ID":"695f0d63-bc8c-45bd-a5c3-1f56498c9366","Type":"ContainerDied","Data":"6c8d9ad8507a4b2028fc806f5bdc57066b474f0f31e740e9ca8f7c980b0228cf"} Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.674912 4912 scope.go:117] "RemoveContainer" containerID="c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.674585 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mjjlj" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.693097 4912 scope.go:117] "RemoveContainer" containerID="175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.715096 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"] Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.720774 4912 scope.go:117] "RemoveContainer" containerID="34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.722978 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mjjlj"] Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.754623 4912 scope.go:117] "RemoveContainer" containerID="c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88" Dec 08 22:16:52 crc kubenswrapper[4912]: E1208 22:16:52.755103 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88\": container with ID starting with c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88 not found: ID does not exist" containerID="c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.755159 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88"} err="failed to get container status \"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88\": rpc error: code = NotFound desc = could not find container \"c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88\": container with ID starting with c1bd5be7e9b5bc72bf7f64887c1ce43ed5afd54c3081f0e4a0d72b0c338fef88 not found: ID does not exist" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.755198 4912 scope.go:117] "RemoveContainer" containerID="175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96" Dec 08 22:16:52 crc kubenswrapper[4912]: E1208 22:16:52.755651 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96\": container with ID starting with 175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96 not found: ID does not exist" containerID="175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.755706 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96"} err="failed to get container status \"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96\": rpc error: code = NotFound desc = could not find container \"175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96\": container with ID starting with 175c9e86f3e96ccd20d8b4d31a23057c101e57686157af0734318c467792de96 not found: ID does not exist" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.755738 4912 scope.go:117] "RemoveContainer" containerID="34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07" Dec 08 22:16:52 crc kubenswrapper[4912]: E1208 22:16:52.756076 4912 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07\": container with ID starting with 34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07 not found: ID does not exist" containerID="34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07" Dec 08 22:16:52 crc kubenswrapper[4912]: I1208 22:16:52.756095 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07"} err="failed to get container status \"34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07\": rpc error: code = NotFound desc = could not find container \"34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07\": container with ID starting with 34df462aa37acac63e5e7dbef42f18eebe211dc8e0756540db05343299e3af07 not found: ID does not exist" Dec 08 22:16:54 crc kubenswrapper[4912]: I1208 22:16:54.439857 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="695f0d63-bc8c-45bd-a5c3-1f56498c9366" path="/var/lib/kubelet/pods/695f0d63-bc8c-45bd-a5c3-1f56498c9366/volumes" Dec 08 22:16:56 crc kubenswrapper[4912]: I1208 22:16:56.372276 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:56 crc kubenswrapper[4912]: I1208 22:16:56.372629 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:56 crc kubenswrapper[4912]: I1208 22:16:56.419859 4912 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:56 crc kubenswrapper[4912]: I1208 22:16:56.758543 4912 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:56 crc kubenswrapper[4912]: I1208 22:16:56.808977 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"] Dec 08 22:16:58 crc kubenswrapper[4912]: I1208 22:16:58.433363 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:16:58 crc kubenswrapper[4912]: E1208 22:16:58.433699 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:16:58 crc kubenswrapper[4912]: I1208 22:16:58.725067 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s7bnt" podUID="bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" containerName="registry-server" containerID="cri-o://7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392" gracePeriod=2 Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.477460 4912 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.583232 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qchsf\" (UniqueName: \"kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf\") pod \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.583372 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities\") pod \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.583436 4912 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content\") pod \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\" (UID: \"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e\") " Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.584414 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities" (OuterVolumeSpecName: "utilities") pod "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" (UID: "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.589283 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf" (OuterVolumeSpecName: "kube-api-access-qchsf") pod "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" (UID: "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e"). InnerVolumeSpecName "kube-api-access-qchsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.605314 4912 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" (UID: "bbf2695c-11a9-4fd5-8d47-6dbef58cd41e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.686152 4912 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.686417 4912 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.686601 4912 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qchsf\" (UniqueName: \"kubernetes.io/projected/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e-kube-api-access-qchsf\") on node \"crc\" DevicePath \"\"" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.735260 4912 generic.go:334] "Generic (PLEG): container finished" podID="bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" containerID="7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392" exitCode=0 Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.735330 4912 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7bnt" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.735350 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt" event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerDied","Data":"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392"} Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.735691 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7bnt" event={"ID":"bbf2695c-11a9-4fd5-8d47-6dbef58cd41e","Type":"ContainerDied","Data":"c2bb934d9e287a522a452f32f9a42e3d11d48acc0b8ad3e7a0e3253ea5d2849d"} Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.735717 4912 scope.go:117] "RemoveContainer" containerID="7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.753235 4912 scope.go:117] "RemoveContainer" containerID="76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.773241 4912 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"] Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.782557 4912 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7bnt"] Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.799836 4912 scope.go:117] "RemoveContainer" containerID="0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.830909 4912 scope.go:117] "RemoveContainer" containerID="7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392" Dec 08 22:16:59 crc kubenswrapper[4912]: E1208 22:16:59.831303 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392\": container with ID starting with 7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392 not found: ID does not exist" containerID="7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.831332 4912 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392"} err="failed to get container status \"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392\": rpc error: code = NotFound desc = could not find container \"7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392\": container with ID starting with 7c8c7f424a61d83b05394cd5a8abb8789cb6585e2d7732eb5c59f78927d8d392 not found: ID does not exist" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.831355 4912 scope.go:117] "RemoveContainer" containerID="76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca" Dec 08 22:16:59 crc kubenswrapper[4912]: E1208 22:16:59.833560 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca\": container with ID starting with 76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca not found: ID does not exist" containerID="76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.833597 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca"} err="failed to get container status \"76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca\": rpc error: code = NotFound desc = could not find container \"76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca\": container with ID starting with 76ddf056c7f0d35291eb28a6898c2a4512e8ca13bd039b16ef18cf4ea5b2e4ca not found: ID does not exist" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.833625 4912 scope.go:117] "RemoveContainer" containerID="0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2" Dec 08 22:16:59 crc kubenswrapper[4912]: E1208 22:16:59.833998 4912 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2\": container with ID starting with 0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2 not found: ID does not exist" containerID="0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2" Dec 08 22:16:59 crc kubenswrapper[4912]: I1208 22:16:59.834024 4912 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2"} err="failed to get container status \"0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2\": rpc error: code = NotFound desc = could not find container \"0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2\": container with ID starting with 0aefab6a9584e28c83cfe251b42eb9993e423b12c9d90323641a07a4c5319ea2 not found: ID does not exist" Dec 08 22:17:00 crc kubenswrapper[4912]: I1208 22:17:00.439946 4912 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbf2695c-11a9-4fd5-8d47-6dbef58cd41e" path="/var/lib/kubelet/pods/bbf2695c-11a9-4fd5-8d47-6dbef58cd41e/volumes" Dec 08 22:17:10 crc kubenswrapper[4912]: I1208 22:17:10.427755 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:17:10 crc kubenswrapper[4912]: E1208 22:17:10.429955 4912 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:17:25 crc kubenswrapper[4912]: I1208 22:17:25.428367 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:17:25 crc kubenswrapper[4912]: E1208 22:17:25.429169 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:17:39 crc kubenswrapper[4912]: I1208 22:17:39.429325 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:17:39 crc kubenswrapper[4912]: E1208 22:17:39.431025 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:17:51 crc kubenswrapper[4912]: I1208 22:17:51.428924 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:17:51 crc kubenswrapper[4912]: E1208 22:17:51.429766 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:18:02 crc kubenswrapper[4912]: I1208 22:18:02.966252 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:18:02 crc kubenswrapper[4912]: I1208 22:18:02.966856 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:18:03 crc kubenswrapper[4912]: I1208 22:18:03.427398 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:18:03 crc kubenswrapper[4912]: E1208 22:18:03.427716 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:18:15 crc kubenswrapper[4912]: I1208 22:18:15.428218 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:18:15 crc kubenswrapper[4912]: E1208 22:18:15.429044 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:18:29 crc kubenswrapper[4912]: I1208 22:18:29.427965 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:18:29 crc kubenswrapper[4912]: E1208 22:18:29.428699 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:18:32 crc kubenswrapper[4912]: I1208 22:18:32.964822 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:18:32 crc kubenswrapper[4912]: I1208 22:18:32.965420 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:18:42 crc kubenswrapper[4912]: I1208 22:18:42.428000 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:18:42 crc kubenswrapper[4912]: E1208 22:18:42.428729 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:18:55 crc kubenswrapper[4912]: I1208 22:18:55.427862 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:18:55 crc kubenswrapper[4912]: E1208 22:18:55.428681 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" 
pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce" Dec 08 22:19:02 crc kubenswrapper[4912]: I1208 22:19:02.964850 4912 patch_prober.go:28] interesting pod/machine-config-daemon-74dp4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 22:19:02 crc kubenswrapper[4912]: I1208 22:19:02.965323 4912 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 22:19:02 crc kubenswrapper[4912]: I1208 22:19:02.965363 4912 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" Dec 08 22:19:02 crc kubenswrapper[4912]: I1208 22:19:02.966010 4912 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd774c52fbcb8b2fb577a29895471f0f041ff245f992fca564777fcf5abd19b1"} pod="openshift-machine-config-operator/machine-config-daemon-74dp4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 22:19:02 crc kubenswrapper[4912]: I1208 22:19:02.967203 4912 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" podUID="831b06bd-095f-439f-a166-088c2d584933" containerName="machine-config-daemon" containerID="cri-o://fd774c52fbcb8b2fb577a29895471f0f041ff245f992fca564777fcf5abd19b1" gracePeriod=600 Dec 08 22:19:03 crc kubenswrapper[4912]: I1208 22:19:03.857886 4912 generic.go:334] "Generic (PLEG): container finished" podID="831b06bd-095f-439f-a166-088c2d584933" containerID="fd774c52fbcb8b2fb577a29895471f0f041ff245f992fca564777fcf5abd19b1" exitCode=0 Dec 08 22:19:03 crc kubenswrapper[4912]: I1208 22:19:03.857967 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerDied","Data":"fd774c52fbcb8b2fb577a29895471f0f041ff245f992fca564777fcf5abd19b1"} Dec 08 22:19:03 crc kubenswrapper[4912]: I1208 22:19:03.858232 4912 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-74dp4" event={"ID":"831b06bd-095f-439f-a166-088c2d584933","Type":"ContainerStarted","Data":"37c800177f69dec231909d5b9612a8734845bbef88ffd2a17538b41391335e54"} Dec 08 22:19:03 crc kubenswrapper[4912]: I1208 22:19:03.858253 4912 scope.go:117] "RemoveContainer" containerID="b91d01e9857dfb38f46a1339c28d8b102f2b3c663f09f2dd90ba8899f55d7db4" Dec 08 22:19:10 crc kubenswrapper[4912]: I1208 22:19:10.428971 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5" Dec 08 22:19:10 crc kubenswrapper[4912]: E1208 22:19:10.429834 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager 
pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:19:24 crc kubenswrapper[4912]: I1208 22:19:24.428535 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:19:24 crc kubenswrapper[4912]: E1208 22:19:24.429368 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:19:37 crc kubenswrapper[4912]: I1208 22:19:37.427658 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:19:37 crc kubenswrapper[4912]: E1208 22:19:37.428403 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:19:50 crc kubenswrapper[4912]: I1208 22:19:50.428106 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:19:50 crc kubenswrapper[4912]: E1208 22:19:50.428868 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"
Dec 08 22:20:04 crc kubenswrapper[4912]: I1208 22:20:04.427921 4912 scope.go:117] "RemoveContainer" containerID="7ceda3a8f9507c4b58cc593f433b0ed9c40d9bd13ce0322311d83a3eff3094e5"
Dec 08 22:20:04 crc kubenswrapper[4912]: E1208 22:20:04.429427 4912 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=manager pod=telemetry-operator-controller-manager-65f6d9c768-xfcfd_openstack-operators(17fc5fff-819f-4786-8e6d-9a0d6265e8ce)\"" pod="openstack-operators/telemetry-operator-controller-manager-65f6d9c768-xfcfd" podUID="17fc5fff-819f-4786-8e6d-9a0d6265e8ce"